# Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# 	http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import pytest

from pytest_rally.process import run_command_with_output
from pytest_rally.rally import Rally

from pytest_rally.fixtures import *

@pytest.hookimpl
def pytest_addoption(parser):
    """Register the ``rally`` option group on the pytest CLI.

    Fixes in the help text: ``--revision`` previously advertised a default of
    ``'current'`` while its actual default is ``None``, and both mutual-exclusion
    notes referred to a nonexistent ``--es-revision`` flag instead of ``--revision``.
    """
    group = parser.getgroup('rally')
    group.addoption("--generate-tests-for-class",
                    action="store",
                    dest="test_class",
                    default="TestTrackRepository",
                    help=("Name of the class for which to auto-generate tests\n"
                          "(default: `TestTrackRepository`)"))
    group.addoption("--distribution-version",
                    action="store",
                    default=None,
                    help=("Download and run tests with a specific ES release\n"
                          "(default: None)\n"
                          "(mutually exclusive with: `--revision`)"))
    group.addoption("--revision",
                    action="store",
                    default=None,
                    help=("Build and run tests with a specific ES commit\n"
                          "(default: None)\n"
                          "(mutually exclusive with: `--distribution-version`)"))
    group.addoption("--skip-autogenerated-tests",
                    action="store_true",
                    default=False,
                    help=("If provided, auto-generated tests will be skipped\n"
                          "(default: False)"))
    group.addoption("--debug-rally",
                    action="store_true",
                    default=False,
                    help=("If provided, Rally commands will just be logged, not executed."))

@pytest.hookimpl
def pytest_cmdline_main(config):
    """Resolve and pin the track repository/revision options on the config.

    The repository defaults to the pytest rootdir and the revision defaults to
    whatever branch (or commit, when detached) that repository is currently on.
    """
    def current_branch(repo):
        # `git branch --show-current` is missing from older Git releases, so
        # parse the starred entry out of plain `git branch` output instead.
        listing = run_command_with_output(f'git -C {repo} branch').split("\n")
        starred = next(line for line in listing if line.startswith("*"))
        if "detached" in starred:
            # Detached HEAD: report the exact commit hash instead of a branch.
            return run_command_with_output(f'git -C {repo} rev-parse HEAD').rstrip("\n")
        # Starred line looks like "* main" — the branch name is the second token.
        return starred.split()[1].strip()

    repo = config.getoption("--track-repository", str(config.rootdir))
    rev = config.getoption("--track-revision", current_branch(repo))

    config.option.track_repository = repo
    config.option.track_revision = rev

def validate_options(config):
    """Abort the run when mutually exclusive ES-selection options are both set.

    Fixes: the message referred to a nonexistent ``--es-revision`` flag (the
    real option is ``--revision``), and ``pytest.fail``'s ``msg`` keyword —
    deprecated in pytest 7.0 and removed in pytest 8 — is replaced by passing
    the reason positionally.
    """
    if config.option.distribution_version and config.option.revision:
        pytest.fail("--distribution-version and --revision are mutually exclusive.", pytrace=False)

def configure_markers(config):
    """Register the custom ``autogenerated`` marker so pytest recognizes it."""
    marker_spec = "autogenerated: mark test as autogenerated"
    config.addinivalue_line("markers", marker_spec)

def configure_rally(config):
    """Build a configured Rally wrapper and stash it on the pytest config."""
    rally = Rally(
        config.option.track_repository,
        config.option.track_revision,
        debug=config.option.debug_rally,
    )
    rally.configure()
    config.option.rally = rally

@pytest.hookimpl
def pytest_configure(config):
    """Standard pytest hook: validate options, then set up markers and Rally."""
    # Order matters: options must be validated before Rally is configured.
    for step in (validate_options, configure_markers, configure_rally):
        step(config)

def default_params(track, challenge):
    """Wrap a (track, challenge) pair as a pytest param with empty rally options."""
    param_id = f"{track}-{challenge}"
    return pytest.param(track, challenge, {}, id=param_id)

@pytest.hookimpl
def pytest_generate_tests(metafunc):
    """Parametrize the targeted test class with every (track, challenge) pair.

    Only tests belonging to the class named by --generate-tests-for-class that
    request both the ``track`` and ``challenge`` fixtures are parametrized;
    the generated tests are additionally marked ``autogenerated``.

    Bug fix: the original condition `"track" and "challenge" in fixturenames`
    only ever tested for "challenge", because the non-empty string literal
    "track" is always truthy. Both fixture names are now checked. The unused
    `repo`/`rev` locals were also removed.
    """
    current_class = metafunc.cls
    desired_class = metafunc.config.option.test_class

    if current_class is None or current_class.__name__ != desired_class:
        return
    if "track" in metafunc.fixturenames and "challenge" in metafunc.fixturenames:
        rally = metafunc.config.option.rally
        params = []
        for track, challenges in rally.all_tracks_and_challenges():
            params.extend(default_params(track, challenge) for challenge in challenges)
        metafunc.parametrize("track,challenge,rally_options", params)
        metafunc.definition.parent.add_marker("autogenerated")

@pytest.hookimpl
def pytest_runtest_setup(item):
    """Skip autogenerated tests when --skip-autogenerated-tests is set.

    Fix: ``pytest.skip``'s ``msg`` keyword was deprecated in pytest 7.0 and
    removed in pytest 8, so the reason is now passed positionally.
    """
    autogenerated = any(marker.name == "autogenerated" for marker in item.iter_markers())
    if autogenerated and item.config.getoption("--skip-autogenerated-tests"):
        pytest.skip("--skip-autogenerated-tests flag was set")
