in testSuite/cmd/testblobFS.go [173:330]
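// verifyRemoteDir validates that the local directory (tbfsc.Object) and the
// remote BlobFS directory (tbfsc.Subject) contain the same files, comparing
// file counts, sizes, and MD5 hashes of the content.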
func (tbfsc TestBlobFSCommand) verifyRemoteDir() {
// Parse the remote BFS URL into its parts (to check whether a SAS token is present)
datalakeURLParts, err := azdatalake.ParseURL(tbfsc.Subject)
if err != nil {
fmt.Println("error parsing the datalake sas ", tbfsc.Subject)
os.Exit(1)
}
// save the directory path from the remote URL,
// then clear it so the URL addresses the filesystem root
currentDirectoryPath := datalakeURLParts.PathName
datalakeURLParts.PathName = ""
// Get the Account Name and Key variables from environment
name := os.Getenv("ACCOUNT_NAME")
key := os.Getenv("ACCOUNT_KEY")
// If shared key credentials (ACCOUNT_NAME / ACCOUNT_KEY) are not supplied AND no SAS token is present in the subject URL
if (name == "" || key == "") && datalakeURLParts.SAS.Encode() == "" {
fmt.Println("ACCOUNT_NAME and ACCOUNT_KEY should be set before executing the test, OR a SAS token should be supplied in the subject URL.")
os.Exit(1)
}
var fsc *filesystem.Client
ctx := context.Background()
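// Pick the auth mechanism: if the URL already carries a SAS token, create a
// credential-free client and let the SAS authorize each request; otherwise
// fall back to a shared key credential built from ACCOUNT_NAME / ACCOUNT_KEY.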
if datalakeURLParts.SAS.Encode() != "" {
fsc, err = filesystem.NewClientWithNoCredential(datalakeURLParts.String(), nil)
} else {
var cred *azdatalake.SharedKeyCredential
cred, err = azdatalake.NewSharedKeyCredential(name, key)
if err != nil {
fmt.Printf("error creating shared key cred. failed with error %s\n", err.Error())
os.Exit(1)
}
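// The per-call version policy allows the service API version (x-ms-version)
// to be overridden per request via the ServiceAPIVersionOverride context
// value set below.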
perCallPolicies := []policy.Policy{ste.NewVersionPolicy()}
fsc, err = filesystem.NewClientWithSharedKeyCredential(datalakeURLParts.String(), cred, &filesystem.ClientOptions{ClientOptions: azcore.ClientOptions{PerCallPolicies: perCallPolicies}})
ctx = context.WithValue(ctx, ste.ServiceAPIVersionOverride, ste.DefaultServiceApiVersion)
}
if err != nil {
fmt.Printf("error creating client. failed with error %s\n", err.Error())
os.Exit(1)
}
// Stat the local object; if it is not a directory,
// validation fails, since validation has to be done between
// a local and a remote directory
objectInfo, err := os.Stat(tbfsc.Object)
if err != nil {
fmt.Printf("error getting the file info for dir %s. failed with error %s\n", tbfsc.Object, err.Error())
os.Exit(1)
}
if !objectInfo.IsDir() {
fmt.Printf("the source provided %s is not a directory path\n", tbfsc.Object)
os.Exit(1)
}
// List the directory
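// The first argument (true) requests a recursive listing; Prefix limits the
// results to entries under the original directory path.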
pager := fsc.NewListPathsPager(true, &filesystem.ListPathsOptions{Prefix: &currentDirectoryPath})
// numberOfFilesinSubject keeps a count of the number of files at the destination
numberOfFilesinSubject := 0
for pager.More() {
resp, err := pager.NextPage(ctx)
if err != nil {
fmt.Printf("error listing the directory path defined by url %s. Failed with error %s\n", datalakeURLParts.String(), err.Error())
os.Exit(1)
}
paths := resp.PathList.Paths
numberOfFilesinSubject += len(paths)
for _, p := range paths {
// Get the file path
// remove the directory path from the file path
// to get the relative path
filePath := *p.Name
filePath = strings.Replace(filePath, currentDirectoryPath, "", 1)
relativefilepath := strings.Trim(filePath, "/")
// replace the "/" with os path separator
relativefilepath = strings.Replace(relativefilepath, "/", string(os.PathSeparator), -1)
// create the expected local path of remote file
filepathLocal := filepath.Join(tbfsc.Object, relativefilepath)
// open the filePath locally and calculate the md5
fpLocal, err := os.Open(filepathLocal)
if err != nil {
fmt.Printf("error opening the file %s. failed with error %s\n", filepathLocal, err.Error())
os.Exit(1)
}
// Get the fileInfo to get size.
fpLocalInfo, err := fpLocal.Stat()
if err != nil {
fmt.Printf("error getting the file info for file %s. failed with error %s\n", filepathLocal, err.Error())
os.Exit(1)
}
// Compare the size of the local file with the remote file
// If the sizes do not match, exit with an error
if fpLocalInfo.Size() != *p.ContentLength {
fmt.Println("the size of local file does not match the remote file")
os.Exit(1)
}
// If the size of the file is zero, there is nothing to hash; close it and move on
if fpLocalInfo.Size() == 0 {
fpLocal.Close()
continue
}
defer fpLocal.Close()
// memory map the file
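// the mapping is read-only (writable=false) and covers the whole file, so the
// content can be hashed without copying it into a separate buffer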
fpMMf, err := NewMMF(fpLocal, false, 0, fpLocalInfo.Size())
if err != nil {
fmt.Printf("error memory mapping the file %s. failed with error %s\n", filepathLocal, err.Error())
os.Exit(1)
}
defer fpMMf.Unmap()
// calculate the md5 of the local (source) file
objMd5 := md5.Sum(fpMMf)
// Download the remote file and calculate md5
fc := fsc.NewFileClient(*p.Name)
fResp, err := fc.DownloadStream(ctx, nil)
if err != nil {
fmt.Printf("error downloading the file %s. failed with error %s\n", fc.DFSURL(), err.Error())
os.Exit(1)
}
downloadedBuffer := make([]byte, *p.ContentLength) // byte buffer into which the file will be downloaded
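// io.ReadFull fails with an error if the stream ends before ContentLength bytes are read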
_, err = io.ReadFull(fResp.Body, downloadedBuffer)
fResp.Body.Close()
if err != nil {
fmt.Println("error reading the downloaded body ", err.Error())
os.Exit(1)
}
// calculate the downloaded file Md5
subjMd5 := md5.Sum(downloadedBuffer)
if objMd5 != subjMd5 {
fmt.Printf("source file %s doesn't match the remote file %s\n", filepathLocal, fc.DFSURL())
os.Exit(1)
}
}
}
// walk through the directory and count the number of files inside the local directory
numberOFFilesInObject := 0
err = filepath.Walk(tbfsc.Object, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() {
numberOFFilesInObject++
}
return nil
})
if err != nil {
fmt.Printf("validation failed with error %s walking inside the source %s\n", err.Error(), tbfsc.Object)
os.Exit(1)
}
// If the number of files in the local and remote directories
// does not match, validation fails.
if numberOFFilesInObject != numberOfFilesinSubject {
fmt.Println("validation failed since the number of files in the source and destination differ")
os.Exit(1)
}
fmt.Printf("successfully validated the source %s and destination %s\n", tbfsc.Object, tbfsc.Subject)
}