def configure_examples()

in bigtop-packages/src/charm/spark/layer-spark/lib/charms/layer/bigtop_spark.py [0:0]


    def configure_examples(self):
        """
        Install sparkpi.sh and sample data to /home/ubuntu.

        The sparkpi.sh script demonstrates spark-submit with the SparkPi class
        included with Spark. This small script is packaged with the spark
        charm source in its ./scripts subdirectory.

        The sample data is used for benchmarks (only PageRank for now).
        Because this data may grow quite large in the future, we use Juju
        Resources to deliver it to the unit. The sample data originated as
        follows:

        - PageRank: https://snap.stanford.edu/data/web-Google.html
        """
        # Handle sparkpi.sh
        script_source = 'scripts/sparkpi.sh'
        script_path = Path(script_source)
        if script_path.exists():
            script_target = '/home/ubuntu/sparkpi.sh'
            new_hash = host.file_hash(script_source)
            old_hash = unitdata.kv().get('sparkpi.hash')
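            # Only (re)install the script when its contents have changed,
            # keeping repeated hook invocations idempotent.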
            if new_hash != old_hash:
                hookenv.log('Installing SparkPi script')
                script_path.copy(script_target)
                Path(script_target).chmod(0o755)
                Path(script_target).chown('ubuntu', 'hadoop')
                unitdata.kv().set('sparkpi.hash', new_hash)
                hookenv.log('SparkPi script was installed successfully')

        # Handle sample data
        sample_source = hookenv.resource_get('sample-data')
        sample_path = sample_source and Path(sample_source)
        if sample_path and sample_path.exists() and sample_path.stat().st_size:
            sample_target = '/home/ubuntu'
            new_hash = host.file_hash(sample_source)
            old_hash = unitdata.kv().get('sample-data.hash')
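            # As above, skip extraction when the resource file is unchanged.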
            if new_hash != old_hash:
                hookenv.log('Extracting Spark sample data')
                # Extract the sample data; since sample data does not impact
                # functionality, log any extraction error but don't fail.
                try:
                    archive.extract(sample_path, destpath=sample_target)
                except Exception:
                    hookenv.log('Unable to extract Spark sample data: {}'
                                .format(sample_path))
                else:
                    unitdata.kv().set('sample-data.hash', new_hash)
                    hookenv.log('Spark sample data was extracted successfully')
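
For context, here is a minimal sketch of how this method might be wired into the
charm's reactive layer and how the sample data resource reaches the unit. The flag
names, the no-argument Spark() constructor, and the resource filename below are
assumptions for illustration, not taken from the charm source:

    # Hypothetical reactive handler (flag names and Spark() signature assumed).
    from charms.reactive import when, when_not, set_state
    from charms.layer.bigtop_spark import Spark

    @when('spark.started')
    @when_not('spark.examples.configured')
    def configure_examples_handler():
        spark = Spark()                  # assumed no-arg constructor
        spark.configure_examples()       # copies sparkpi.sh, extracts sample data
        set_state('spark.examples.configured')

    # On the operator side, the sample data is supplied as a Juju resource, e.g.:
    #   juju attach-resource spark sample-data=./web-Google.txt.gz
    # hookenv.resource_get('sample-data') then returns the local path to that
    # file (or False when no resource has been supplied).

Once installed, /home/ubuntu/sparkpi.sh can be run on the unit to submit the
SparkPi example class via spark-submit, as described in the docstring above.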