def main()

in src/workspaces/cookiefactory/setup_content/__main__.py [0:0]


def main():
    args = parse_args()

    # resolve the content root and define a helper to build paths to files nested within it
    content_root = os.path.join(os.path.dirname(__file__), args.content_root)

    def content_path(relative_path):
        return os.path.join(content_root, relative_path)

    # this helper object does all the heavy lifting on workspaces for us
    ws = deploy_utils.WorkspaceUtils(
                workspace_id=args.workspace_id,
                region_name=args.region_name,
                endpoint_url=args.endpoint_url,
                profile=args.profile)

    content_start_time = get_content_start_time(ws, args.content_start_time)
    content_start_time_ms = int(round(content_start_time.timestamp()*1000))
    timestamp_string = datetime.datetime.fromtimestamp(content_start_time.timestamp(), datetime.timezone.utc).strftime('%Y-%m-%d %H:%M:%S %Z')
    print(f"using following timestamp for data ingestion: {timestamp_string} ({content_start_time_ms}ms from epoch)")

    # this helper object does all the heavy lifting for Timestream telemetry for us
    telemetry = timestream_libs.TimestreamTelemetryImporter(
                region_name=args.region_name,
                database_name=args.telemetry_database_name,
                table_name=args.telemetry_table_name,
                stack_name=args.telemetry_stack_name,
                profile=args.profile)
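    # telemetry.lambda_arn (the Lambda resolved from the telemetry stack) is wired into the Timestream-backed component types imported below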

    ####################################################################
    # We run teardown steps first.
    # Delete entities (must happen before deleting component types)
    if args.delete_entities or args.delete_all:
        print('Deleting entities...')
        ws.delete_all_entities()

    # Delete component types
    if args.delete_component_types or args.delete_all:
        print('Deleting component types...')
        ws.delete_all_component_types()

    # Delete scenes
    if args.delete_scenes or args.delete_all:
        print('Deleting scenes...')
        ws.delete_all_scenes()

    # Delete model files (uploaded to the workspace bucket as resources)
    if args.delete_models or args.delete_all:
        print('Deleting models...')
        ws.delete_resource(destination='CookieFactoryMixer.glb')
        ws.delete_resource(destination='CookieFactoryWaterTank.glb')
        ws.delete_resource(destination='CookieFactoryLine.glb')
        ws.delete_resource(destination='CookieFactoryEnvironment.glb')

    # Delete telemetry data
    if args.delete_telemetry or args.delete_all:
        print('Deleting sample telemetry data...')
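        # dropping and recreating the Timestream table removes all previously imported sample rows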
        telemetry.recreate_table()

    # Sample video data may be shared by multiple samples, so we don't delete it here; instead we rely on the KVS retention period.

    # Deleting the workspace role and bucket requires all content to be deleted first, so the delete-all flag must also be set.
    # The workspace itself is not created by this setup script, so its teardown is gated behind a separate, explicit flag.
    if args.delete_all and args.delete_workspace_role_and_bucket:
        print('Deleting workspace role and bucket...')
        ws.delete_workspace_role_and_bucket()

    ####################################################################
    # Now run the import steps.  Steps skipped above may cause failures in the steps below.

    # Import component types
    if args.import_component_types or args.import_all:
        print('Importing component types...')
        ws.import_component_type(content_path('../../modules/timestream_telemetry/component-types/timestream_component_type.json'), telemetry.lambda_arn)
        ws.import_component_type(content_path('component_types/alarm_component_type.json'), telemetry.lambda_arn)
        ws.import_component_type(content_path('component_types/mixer_component_type.json'))
        ws.import_component_type(content_path('component_types/watertank_component_type.json'))
        ws.import_component_type(content_path('component_types/space_component_type.json'))

    # Import entities
    if args.import_entities or args.import_all:
        print('Importing entities...')
        ws.import_entities(content_path('entities/entities.json'))

    # Import scenes
    if args.import_scenes or args.import_all:
        print('Importing scenes...')
        ws.import_scene(file_name=content_path('scenes/CookieFactory.json'), scene_name='CookieFactory')

    # Import models
    if args.import_models or args.import_all:
        print('Importing models...')
        ws.import_resource(file_name=content_path('scenes/CookieFactoryMixer.glb'), destination='CookieFactoryMixer.glb')
        ws.import_resource(file_name=content_path('scenes/CookieFactoryWaterTank.glb'), destination='CookieFactoryWaterTank.glb')
        ws.import_resource(file_name=content_path('scenes/CookieFactoryLine.glb'), destination='CookieFactoryLine.glb')
        ws.import_resource(file_name=content_path('scenes/CookieFactoryEnvironment.glb'), destination='CookieFactoryEnvironment.glb')

    # Import telemetry data
    if args.import_telemetry or args.import_all:
        print('Importing sample telemetry data...')
        telemetry.recreate_table()
        telemetry.import_csv(content_path('sample_data/telemetry/telemetry.csv'), rebase_time_ms=content_start_time_ms)

    # Import video data
    if args.import_video or args.import_all:
        print('Importing video data...')
        video_utils = deploy_utils.VideoUtils(args.region_name, profile=args.profile)
        video_dir = content_path('sample_data/video')
        kvs_stream_names = video_utils.upload_all_mkv_files(dir_name=video_dir, rebase_time_ms=content_start_time_ms)
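
For reference, main() assumes that parse_args() exposes every attribute referenced above (workspace and region settings, telemetry settings, plus the per-resource delete/import flags). The sketch below is illustrative only: the flag names, defaults, and help text are assumptions that mirror the args.* attributes used in main(), not the repository's actual parse_args() definition.

# Minimal argparse sketch (assumed flags; argparse maps '--delete-all' to args.delete_all, etc.)
import argparse

def parse_args_sketch():
    parser = argparse.ArgumentParser(description='Set up or tear down CookieFactory sample content')
    # workspace / connection settings
    parser.add_argument('--workspace-id', required=True)
    parser.add_argument('--region-name', required=True)
    parser.add_argument('--endpoint-url', default=None)
    parser.add_argument('--profile', default=None)
    # content settings
    parser.add_argument('--content-root', default='..')
    parser.add_argument('--content-start-time', default=None)
    # telemetry settings
    parser.add_argument('--telemetry-database-name', default=None)
    parser.add_argument('--telemetry-table-name', default=None)
    parser.add_argument('--telemetry-stack-name', default=None)
    # teardown flags
    for flag in ('--delete-entities', '--delete-component-types', '--delete-scenes',
                 '--delete-models', '--delete-telemetry', '--delete-all',
                 '--delete-workspace-role-and-bucket'):
        parser.add_argument(flag, action='store_true')
    # import flags
    for flag in ('--import-component-types', '--import-entities', '--import-scenes',
                 '--import-models', '--import-telemetry', '--import-video', '--import-all'):
        parser.add_argument(flag, action='store_true')
    return parser.parse_args()

With flags along these lines, an --import-all style flag would exercise every import branch in main(), while the delete flags drive the teardown branches at the top.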