import os
import glob
import subprocess
from types import SimpleNamespace

from invoke import task
from pycapo import CapoConfig

from deployment.maven import mvn
import deployment.descriptor as descriptor
import deployment.git as git

# invoke (unlike Fabric 1) has no global `env`, so the profile tasks stash the
# active descriptor and quiet flag in this module-level namespace instead
env = SimpleNamespace(descriptor=None, quiet=False)
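
# What deployment.descriptor is assumed to provide, inferred from the call
# sites below (a sketch, not the actual definitions):
#
#   descriptor.load(profile) -> Descriptor
#   Descriptor.frontend / .web / .workflow / .solr   -> lists of Host entries
#   Descriptor.database_settings()                   -> JDBC settings for liquibase
#   Descriptor.database_capo_profile()               -> CAPO profile name, e.g. 'nmtest'
#   Host.host                  -> e.g. 'webapps@some-frontend'
#   Host.capo[key]             -> per-host CapoConfig lookup
#   Host.connection()          -> Fabric-style Connection (run/put/cd/sftp)
#   Host.war_deploy(c, war)    -> pushes a war to the host's servlet container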


@task(name='local')
def localmachine(c, quiet=False):
    """Activate the local profile"""
    env.descriptor = descriptor.load('local')
    env.quiet = quiet


@task
def dev(c, force=False, quiet=False):
    """Activate the development profile (webtest)"""
    env.descriptor = descriptor.load('dev')
    env.quiet = quiet


@task
def test(c, force=False, quiet=False):
    """Activate the test profile (archive-test)"""
    git.ensure_clean(c, force=force)
    env.descriptor = descriptor.load('test')
    env.quiet = quiet


@task
def production(c, force=False, quiet=False):
    """Activate the production profile (archive-new)"""
    git.ensure_clean(c, force=force)
    git.ensure_tagged(c, force=force)
    env.descriptor = descriptor.load('production')
    env.quiet = quiet
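
# Typical usage (a sketch; assumes these tasks are exposed through invoke's
# CLI, e.g. `inv`):
#
#   inv dev requesthandler          # deploy just the RH to webtest
#   inv test everything             # full deployment to archive-test
#   inv production --force everything
#
# A profile task must come first on the command line so env.descriptor is
# populated before any deploy task runs.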


# ---------------------------------------------------------------------------
#
#        U T I L I T I E S
#
# ---------------------------------------------------------------------------
def ensure_built(c, *modules, **kwargs):
    if not kwargs.get('already_built', False):
        mvn(c, 'package', modules=modules)
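
# e.g. ensure_built(c, 'archive-solr') runs `mvn package` for that module,
# while ensure_built(c, 'archive-solr', already_built=True) is a no-op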


# ---------------------------------------------------------------------------
#
#        PYAT
#
# ---------------------------------------------------------------------------
@task(name='get-version')
def get_version(c):
    """Gets the version out of pyat"""
    pyat_package = {}
    with open("pyat/pyat/_version.py") as fp:
        exec(fp.read(), pyat_package)
        return pyat_package['___version___']
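
# For the exec() above to work, pyat/pyat/_version.py is assumed to contain a
# module-level assignment shaped like:
#
#   ___version___ = '3.6'   # hypothetical version string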


@task(name='build-pyat')
def build_pyat(c):
    """Build PYAT"""
    with c.cd('pyat'):
        print("Running pip install -r requirements.txt")
        c.run(f'pip {"-q" if env.quiet else ""} install -r requirements.txt')
        c.run('rm -rf dist')
        c.run('python setup.py sdist')


@task(name='install-pyat-into-venv')
def install_pyat(c):
    with c.cd('pyat'):
        c.run('python setup.py install')


# ---------------------------------------------------------------------------
#
#        F R O N T - E N D
#
# ---------------------------------------------------------------------------
@task(name='build-front-end')
def build_front_end(c):
    # if several front-end hosts are configured, the last one listed wins
    name = env.descriptor.frontend[-1].host.replace('webapps@', '')
    try:
        with open('archiveIface/src/env.js', 'w', encoding='utf8') as f:
            f.write(f'const SERVICEAPIURL = "https://{name}/archiveServices/";\n')
            f.write(f'const CURRENT_RELEASE_NUMBER = "{get_version(c)}";\n')
    except Exception:
        print('Failed to write out front end env.js for deployment service layer url')
        raise
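
    # the generated env.js ends up looking like this (host and version are
    # illustrative):
    #
    #   const SERVICEAPIURL = "https://some-frontend-host/archiveServices/";
    #   const CURRENT_RELEASE_NUMBER = "3.6.0";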

    with c.cd('archiveIface'):
        c.run('rm -rf portal/*')
        c.run('npm install')
        c.run('npm run build')
        with c.cd('portal'):
            # actually gzip the archive so the .tar.gz name is accurate
            c.run('tar czf portal.tar.gz *')


@task(name='post-a-pre-deployment-message')
def pre_deployment_message(c):
    for loc in env.descriptor.frontend:
        name = loc.host.replace('webapps@', '')
        with loc.connection() as site:
            with site.cd(f'/home/{name}/content/portal/js'):
                site.run(
                    'sed -i "s/SHOW_DEPLOYMENT_MSG = false/SHOW_DEPLOYMENT_MSG = true/g" MotD.js')


@task(name='remove-deployment-message')
def all_clear_remove_deployment_msg(c):
    for loc in env.descriptor.frontend:
        name = loc.host.replace('webapps@', '')
        with loc.connection() as site:
            with site.cd(f'/home/{name}/content/portal/js'):
                site.run(
                    'sed -i "s/SHOW_DEPLOYMENT_MSG = true/SHOW_DEPLOYMENT_MSG = false/g" MotD.js')


@task(name='deploy-fe')
def deploy_fe(c):
    build_front_end(c)

    for loc in env.descriptor.frontend:
        name = loc.host.replace('webapps@', '')
        with loc.connection() as site:
            with site.cd(f'/home/{name}/content'):
                site.run('rm -rf portal/*')
            site.put('archiveIface/portal/portal.tar.gz',
                     remote=f'/home/{name}/content/portal/portal.tar.gz')
            with site.cd(f'/home/{name}/content/portal'):
                site.run('tar xf portal.tar.gz')
                site.run('rm portal.tar.gz')
            site.run(f'chmod -R 755 /home/{name}/content/portal')

    # After everything is out, revert the local env.js edits
    c.run('git checkout archiveIface/src/env.js')


# ---------------------------------------------------------------------------
#
#        A R C H I V E   S E R V I C E S
#
# ---------------------------------------------------------------------------
@task(name='build-services')
def build_services(c):
    """Build services"""
    with c.cd('services'):
        print("Running pip install -r requirements.txt")
        # c.run(f'pip {"-q" if env.quiet else ""} install -r requirements.txt')
        c.run('rm -rf dist')
        c.run('python setup.py sdist')


@task(name='archive-service')
def deploy_archive_service(c):
    build_pyat(c)
    build_services(c)

    version = get_version(c)
    for host in env.descriptor.frontend:
        with host.connection() as site:
            try:
                site.put(f'pyat/dist/pyat-{version}.tar.gz',
                         remote='archive-service/')
                site.put(f'services/dist/services-{version}.tar.gz',
                         remote='archive-service/')
                with site.cd('./archive-service'):
                    # Get the current soft link targets on the server
                    curr_link = site.run('readlink -f current').stdout.strip()
                    curr_pyat_link = site.run('readlink -f current-pyat').stdout.strip()
                    # Empty their contents, delete the folders they pointed to
                    # and the links themselves
                    site.run('rm -rf current/*')
                    site.run('rm -rf current-pyat/*')
                    site.run(f'rm -rf {curr_link}')
                    site.run(f'rm -rf {curr_pyat_link}')
                    site.run('rm -f current')
                    site.run('rm -f current-pyat')
                    # Untar the new build and re-map the soft links
                    site.run(f'tar xvf services-{version}.tar.gz')
                    site.run(f'tar xvf pyat-{version}.tar.gz')
                    site.run(f'ln -s services-{version} current')
                    site.run(f'ln -s pyat-{version} current-pyat')
                    # Install both packages into the server-side virtualenv
                    with site.cd('current-pyat'):
                        site.run('source ../bin/activate && python setup.py install')
                    with site.cd('current'):
                        site.run('source ../bin/activate && python setup.py install')
                    # Remove the old tar balls.
                    site.run('rm *.tar.gz')
                site.run('chmod -R 755 ./archive-service')
                site.run('sudo /etc/init.d/nrao-apache restart')
                print(f'deployed services v.{version} to {host.host}')
            except Exception as ex:
                print(f'deploy to {host.host} failed: {ex}')
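
# Server-side layout assumed by deploy_archive_service (a sketch; names are
# illustrative):
#
#   ~/archive-service/
#       bin/activate                 <- virtualenv shared by both installs
#       services-3.6/                <- unpacked services sdist
#       pyat-3.6/                    <- unpacked pyat sdist
#       current       -> services-3.6
#       current-pyat  -> pyat-3.6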


# ---------------------------------------------------------------------------
#
#        S O L R
#
# ---------------------------------------------------------------------------
@task(name='solr-indexer')
def solr_indexer(c, **kwargs):
    """Deploys the general solr indexer"""
    ensure_built(c, 'archive-solr', **kwargs)

    # look up each host's deployment directory from its capo config
    for host in env.descriptor.solr:
        dir = host.capo['solr-indexer.deployment.directory']

        with host.connection() as conn:
            try:
                conn.sftp().mkdir(dir)
                print(f'Creating {dir} on {host.host}')
            except OSError:
                # the directory already exists
                pass
            path = glob.glob('archive-solr/target/*-capsule.jar')[0]
            print(f'Uploading archive-solr.jar to {host.host}')
            conn.put(path, dir + "/archive-solr.jar")
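            # NOTE: the indexer script goes to a home-relative
            # 'periodic-indexer' path; this assumes the capo deployment
            # directory above is that same directory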
            conn.put('archive-solr/src/main/scripts/runMulticoreIndexer.sh',
                     "periodic-indexer/runMulticoreIndexer.sh")
            conn.sftp().chmod("periodic-indexer/runMulticoreIndexer.sh", 0o755)


@task(name='workflow-solr-indexer')
def workflow_solr_indexer(c, **kwargs):
    """Deploys the workflow's solr indexer"""
    ensure_built(c, 'archive-solr', **kwargs)

    # look up each host's deployment directory from its capo config
    for host in env.descriptor.workflow:
        with host.connection() as conn:
            dir = host.capo['workflow.deployment.directory']

            print(f'Deploying archive-solr.jar and runMulticoreIndexer.sh to {host.host}')
            conn.put(glob.glob('archive-solr/target/*-capsule.jar')[0],
                     dir + "/bin/archive-solr.jar")
            conn.put('archive-solr/src/main/scripts/runMulticoreIndexer.sh',
                     dir + "/bin/runMulticoreIndexer.sh")
            conn.sftp().chmod(dir + "/bin/runMulticoreIndexer.sh", 0o755)


# ---------------------------------------------------------------------------
#
#        R E Q U E S T   H A N D L E R
#
# ---------------------------------------------------------------------------
@task
def requesthandler(c, **kwargs):
    """Deploys the request handler (RH)"""
    ensure_built(c, 'NGRH-ALMA-10_8/rh_web', **kwargs)
    for host in env.descriptor.web:
        host.war_deploy(c, 'NGRH-ALMA-10_8/rh_web/target/rh.war')


# ---------------------------------------------------------------------------
#
#        W O R K F L O W
#
# ---------------------------------------------------------------------------
@task
def workflow_jobs(c, **kwargs):
    """Deploys the workflow jobs jar file"""
    # first build the damn thing
    ensure_built(c, 'workflow-all/workflow-job-runner', **kwargs)

    for host in env.descriptor.workflow:
        with host.connection() as conn:
            dir = host.capo['workflow.deployment.directory']

            print(f'Uploading workflow-jobs.jar and job-runner.sh to {dir} on {host.host}')
            conn.put(glob.glob("workflow-all/workflow-job-runner/target/*-capsule.jar")[0],
                     dir + "/bin/workflow-jobs.jar")
            conn.put("workflow-all/workflow-job-runner/src/main/sh/job-runner.sh",
                     dir + "/bin/job-runner.sh")
            conn.sftp().chmod(dir + "/bin/job-runner.sh", 0o0755)


@task(name='workflow-server')
def workflow_server(c, **kwargs):
    """Deploys the workflow server"""
    ensure_built(c, 'workflow-all/workflow-servlet', **kwargs)
    for host in env.descriptor.workflow:
        host.war_deploy(c, 'workflow-all/workflow-servlet/target/workflow.war')


@task(name='workflow-pyat')
def workflow_pyat(c, **kwargs):
    """Deploys pyat to the workflow area"""
    # build_pyat takes only the context; forwarding kwargs like already_built
    # would be a TypeError
    build_pyat(c)
    for host in env.descriptor.workflow:
        dir = host.capo['workflow.deployment.directory']
        with host.connection() as site:
            site.put(glob.glob('pyat/dist/*.tar.gz')[0], 'pyat.tar.gz')
            print("Running pip install pyat.tar.gz")
            # Do not use the --ignore-installed option: it can leave abandoned
            # files from previous versions in the environment.  If you want to
            # be extra sure of your libraries, use --force-reinstall instead.
            # See: https://github.com/pypa/pip/issues/5020
            site.run(dir + f'/bin/pip {"-q" if env.quiet else ""} install pyat.tar.gz')
            site.run('rm -f pyat.tar.gz')
            # `|| true` so a chmod failure on files we don't own doesn't abort
            # the deploy
            site.run('chmod o+rx ' + dir + '/bin/* || true')
            site.run('chmod -R o+rX ' + dir + '/lib || true')

            # epilogue scripts must not be group writable or qsub will not execute them!
            site.run('chmod g-w ' + dir + '/bin/epilogue')
            site.run('chmod g-w ' + dir + '/bin/logwrapper')
            site.run('chmod g-w ' + dir + '/bin/ingest || true')


@task
def workflow(c, **kwargs):
    """Deploys the workflow in its entirety"""
    ensure_built(c,
                 'workflow-all/workflow-job-runner',
                 'workflow-all/workflow-servlet',
                 **kwargs)
    workflow_pyat(c)
    workflow_jobs(c, already_built=True)
    workflow_server(c, already_built=True)


# ---------------------------------------------------------------------------
#
#        A M Y G D A L A
#
# ---------------------------------------------------------------------------


@task
def amygdala(c, **kwargs):
    """Deploys amygdala"""
    ensure_built(c, 'amygdala', **kwargs)
    for host in env.descriptor.web:
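        # NOTE: the other war_deploy call sites pass a path to a built .war
        # (e.g. 'NGRH-ALMA-10_8/rh_web/target/rh.war'); if war_deploy expects
        # a file path, this call likely needs the amygdala war's target path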
        host.war_deploy(c, 'amygdala')


# ---------------------------------------------------------------------------
#
#        D A T A F E T C H E R
#
# ---------------------------------------------------------------------------


@task
def datafetcher(c, **kwargs):
    """Deploys the data fetcher"""
    # first build the damn thing
    ensure_built(c, 'data-fetcher', **kwargs)

    for host in env.descriptor.workflow:
        with host.connection() as conn:
            dir = host.capo['workflow.deployment.directory']

            # now do the deploy steps
            print(f'Deploying data-fetcher to {host.host}')
            conn.put(glob.glob("data-fetcher/target/*-capsule.jar")[0],
                     dir + "/bin/data-fetcher.jar")
            conn.put("data-fetcher/src/main/sh/data-fetcher.sh",
                     dir + "/bin/data-fetcher.sh")
            conn.sftp().chmod(dir + "/bin/data-fetcher.sh", 0o0755)


# ---------------------------------------------------------------------------
#
#        D A T A B A S E   M A I N T E N A N C E
#
# ---------------------------------------------------------------------------


@task
def updatedb(c):
    """
    Update the schema to the latest version using liquibase.
    """
    settings = env.descriptor.database_settings()
    with c.cd('schema'):
        c.run('mvn liquibase:update '
              '-Dliquibase.changeLogFile=liquibase/liquibase.xml '
              '-Dliquibase.promptOnNonLocalDatabase=false '
              f'-Dliquibase.driver={settings.jdbcDriver} '
              f'-Dliquibase.url={settings.jdbcUrl} '
              f'-Dliquibase.username={settings.jdbcUsername} '
              f'-Dliquibase.password={settings.jdbcPassword}',
              pty=True)
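
# database_settings() is assumed to return an object with JDBC-style fields,
# e.g. (values illustrative):
#
#   settings.jdbcDriver    = 'org.postgresql.Driver'
#   settings.jdbcUrl       = 'jdbc:postgresql://dbhost:5432/archive'
#   settings.jdbcUsername  = 'archive'
#   settings.jdbcPassword  = '...'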


@task
def setdbbasis(c):
    """
    Marks the database as having already executed the archive-3.2, archive-3.5
    and archive-3.6 (initial) schema migrations.

    Useful for the first deployment after setting up liquibase
    """
    settings = env.descriptor.database_settings()
    with c.cd('schema'):
        # sync each baseline changelog in order
        for migration in ('archive-3.2', 'archive-3.5', 'archive-3.6'):
            c.run('mvn liquibase:changelogSync '
                  f'-Dliquibase.changeLogFile=liquibase/migrations/{migration}.xml '
                  f'-Dliquibase.driver={settings.jdbcDriver} '
                  f'-Dliquibase.url={settings.jdbcUrl} '
                  f'-Dliquibase.username={settings.jdbcUsername} '
                  f'-Dliquibase.password={settings.jdbcPassword}',
                  pty=True)


@task
def diffdb(c):
    """
    Outputs a diff between prod and test in liquibase format.

    Use this if you have made changes to webtest and need to save them as a
    liquibase migration. You may need to edit if other changes are in-flight.
    """
    prod = CapoConfig(profile='nmprod').settings('metadataDatabase')
    test = CapoConfig(profile='nmtest').settings('metadataDatabase')
    with c.cd('schema'):
        c.run('mvn liquibase:diff '
              '-Dliquibase.changeLogFile=foo '
              '-Dliquibase.diffChangeLogFile=diff.xml '
              f'-Dliquibase.driver={test.jdbcDriver} '
              f'-Dliquibase.url={test.jdbcUrl} '
              f'-Dliquibase.username={test.jdbcUsername} '
              f'-Dliquibase.password={test.jdbcPassword} '
              f'-Dliquibase.referenceDriver={prod.jdbcDriver} '
              f'-Dliquibase.referenceUrl={prod.jdbcUrl} '
              f'-Dliquibase.referenceUsername={prod.jdbcUsername} '
              f'-Dliquibase.referencePassword={prod.jdbcPassword}',
              pty=True)


# ---------------------------------------------------------------------------
#
#        E V E R Y T H I N G
#
# ---------------------------------------------------------------------------
@task
def ousimport(c):
    """Installs pyat into the local virtualenv and runs the OUS importer"""
    install_pyat(c)

    # ous-importer reads CAPO_PROFILE from its environment to pick a database,
    # so run it with a copy of os.environ that overrides the profile
    print(f'Running ous-importer with profile {env.descriptor.database_capo_profile()}')
    newenv = os.environ.copy()
    newenv['CAPO_PROFILE'] = env.descriptor.database_capo_profile()
    subprocess.run('ous-importer', env=newenv)


@task
def everything(c):
    """Deploys EVERYTHING!!!"""
    # First, let's post a deployment message to the FE MotD file
    pre_deployment_message(c)

    # Next, let's update liquibase
    updatedb(c)

    # build everything
    mvn(c, 'clean', 'package')

    # now deploy everything
    deploy_fe(c)
    deploy_archive_service(c)

    requesthandler(c, already_built=True)
    solr_indexer(c, already_built=True)
    workflow_solr_indexer(c, already_built=True)
    datafetcher(c, already_built=True)
    workflow(c, already_built=True)
    amygdala(c, already_built=True)


@task
def all_clear(c):
    """Removes the deployment message from the MotD once we're sure the deployment is done"""
    all_clear_remove_deployment_msg(c)

# this chdir runs at import time, before any task executes, so every task can
# pretend it was started from the root of the project
os.chdir("..")