def __upload_function_code()

in rdk/rdk.py
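
Builds (where the runtime requires it) and uploads a rule's Lambda code package to the per-region code bucket: Gradle for java8, dotnet restore / dotnet lambda package for the .NET Core runtimes, and a plain zip of the rule directory for every other runtime. Returns the S3 key of the uploaded package.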


    def __upload_function_code(self, rule_name, params, account_id, my_session, code_bucket_name):
        if params['SourceRuntime'] == "java8":
            # Do the Java build and package with Gradle.
            print(f"[{my_session.region_name}]: Running Gradle build for {rule_name}")
            working_dir = os.path.join(os.getcwd(), rules_dir, rule_name)
            command = ["gradle", "build"]
            subprocess.call(command, cwd=working_dir)
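            # Note: the Gradle exit status returned by subprocess.call is not
            # checked; a failed build only surfaces when the upload below
            # cannot find the distribution zip.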

            # Set the source as the distribution zip produced by the Gradle build.
            s3_src = os.path.join(os.getcwd(), rules_dir, rule_name, 'build', 'distributions', rule_name + my_session.region_name + ".zip")
            s3_dst = "/".join((rule_name, rule_name + ".zip"))

            my_s3 = my_session.resource('s3')

            print (f"[{my_session.region_name}]: Uploading " + rule_name)
            my_s3.meta.client.upload_file(s3_src, code_bucket_name, s3_dst)
            print (f"[{my_session.region_name}]: Upload complete.")

        elif params['SourceRuntime'] in ["dotnetcore1.0", "dotnetcore2.0"]:
            print(f"[{my_session.region_name}]: Packaging {rule_name}")
            working_dir = os.path.join(os.getcwd(), rules_dir, rule_name)
            commands = [["dotnet", "restore"]]

            app_runtime = "netcoreapp1.0"
            if params['SourceRuntime'] == "dotnetcore2.0":
                app_runtime = "netcoreapp2.0"

            # "dotnet lambda package" is provided by the Amazon.Lambda.Tools CLI extension.
            commands.append(["dotnet", "lambda", "package", "-c", "Release", "-f", app_runtime])

            for command in commands:
                subprocess.call(command, cwd=working_dir)

            # Remove old zip file if it already exists
            package_file_dst = os.path.join(rule_name, rule_name + ".zip")
            self.__delete_package_file(package_file_dst)

            # Create new package in temp directory, copy to rule directory
            # This copy avoids the archiver trying to include the output zip in itself
            s3_src_dir = os.path.join(os.getcwd(), rules_dir, rule_name, 'bin', 'Release', app_runtime, 'publish')
            tmp_src = shutil.make_archive(os.path.join(tempfile.gettempdir(), rule_name + my_session.region_name), 'zip', s3_src_dir)
            s3_dst = "/".join((rule_name, rule_name + ".zip"))

            my_s3 = my_session.resource('s3')

            print (f"[{my_session.region_name}]: Uploading " + rule_name)
            my_s3.meta.client.upload_file(tmp_src, code_bucket_name, s3_dst)
            print (f"[{my_session.region_name}]: Upload complete.")
            if not(os.path.exists(package_file_dst)):
                shutil.copy(tmp_src, package_file_dst)
            self.__delete_package_file(tmp_src)

        else:
            print (f"[{my_session.region_name}]: Zipping " + rule_name)
            # Remove old zip file if it already exists
            package_file_dst = os.path.join(rule_name, rule_name + ".zip")
            self.__delete_package_file(package_file_dst)

            # Zip the rule code files and upload them to the S3 bucket.
            s3_src_dir = os.path.join(os.getcwd(), rules_dir, rule_name)

            tmp_src = shutil.make_archive(os.path.join(tempfile.gettempdir(), rule_name + my_session.region_name), 'zip', s3_src_dir)

            s3_dst = "/".join((rule_name, rule_name + ".zip"))

            my_s3 = my_session.resource('s3')

            print (f"[{my_session.region_name}]: Uploading " + rule_name)
            my_s3.meta.client.upload_file(tmp_src, code_bucket_name, s3_dst)
            print (f"[{my_session.region_name}]: Upload complete.")
            if not(os.path.exists(package_file_dst)):
                shutil.copy(tmp_src, package_file_dst)
            self.__delete_package_file(tmp_src)

        return s3_dst
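
A minimal sketch of how this helper might be driven from the deploy flow, assuming a boto3 session and a pre-created code bucket; the params dict shape and all names other than the method itself are illustrative:

    import boto3

    my_session = boto3.session.Session(region_name="us-east-1")
    params = {"SourceRuntime": "python3.9"}  # any runtime besides java8/dotnetcore takes the generic zip branch
    code_bucket_name = "config-rule-code-bucket-123456789012-us-east-1"  # hypothetical bucket name

    # The method is name-mangled (leading double underscore), so it is only
    # callable from inside the deployer class, e.g. during deploy:
    #   s3_dst = self.__upload_function_code("MyRule", params, "123456789012", my_session, code_bucket_name)
    # The return value is the uploaded object's key, e.g. "MyRule/MyRule.zip".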