in src/managed/DiffGen/DiffGeneration/Workers/TokenizeArchives.cs [170:237]
/// <summary>
/// Tokenizes the target archive into the working folder's archive sub-folder.
/// A hash cookie written beside the tokens guards against reusing a working
/// folder that was previously populated for a different archive.
/// </summary>
/// <exception cref="DiffBuilderException">
/// Thrown with <c>FailureType.BadWorkingFolder</c> when the existing hash cookie
/// records a different archive hash than the current <c>ArchivePath</c>.
/// </exception>
protected override void ExecuteInternal()
{
    string folder = Path.Combine(WorkingFolder, ArchiveSubFolder);
    Logger.LogInformation("Tokenizing {ArchivePath} to {folder}", ArchivePath, folder);
    Directory.CreateDirectory(folder);

    const string ARCHIVE_HASH_COOKIE = "ArchiveHash.Cookie";
    string archiveHashCookiePath = Path.Combine(folder, ARCHIVE_HASH_COOKIE);

    // When tokens were supplied by the caller, trust the hash they recorded for the
    // archive item; otherwise hash the archive file directly.
    Hash archiveHash;
    if (Tokens != null)
    {
        Logger.LogInformation("Using supplied tokens");
        archiveHash = Tokens.ArchiveItem.GetSha256Hash();
    }
    else
    {
        archiveHash = Hash.FromFile(ArchivePath);
    }

    var archiveHashString = archiveHash.ToString();
    if (File.Exists(archiveHashCookiePath))
    {
        var oldHashOfArchive = File.ReadAllText(archiveHashCookiePath);

        // Hashes are machine-generated identifiers: compare ordinally, never culturally.
        if (!archiveHashString.Equals(oldHashOfArchive, StringComparison.Ordinal))
        {
            var message = $"Working directory was already used for another target archive. Clean contents or use another directory." + Environment.NewLine;
            message += $"Hash of original archive: {oldHashOfArchive}. {ArchivePath} has a hash of {archiveHashString}";
            throw new DiffBuilderException(message, FailureType.BadWorkingFolder);
        }
    }
    else
    {
        // First use of this working folder: record the archive hash for future runs.
        File.WriteAllText(archiveHashCookiePath, archiveHashString);
    }

    var jsonPath = GetArchiveJson(folder);
    var cookiePath = jsonPath + ".cookie";
    if (Tokens != null)
    {
        Logger.LogInformation("Writing Json: {jsonPath}", jsonPath);

        // File.Create truncates any pre-existing file. File.OpenWrite does not, which
        // would leave trailing bytes of an older, longer JSON and corrupt the output.
        using var jsonStream = File.Create(jsonPath);
        Tokens.WriteJson(jsonStream, true);
    }
    else
    {
        CheckForCancellation();
        var stopWatch = Stopwatch.StartNew();
        Logger.LogInformation("Loading tokens from {ArchivePath}", ArchivePath);
        LoadTokens(UseCase, folder, out ArchiveTokenization tokens);
        stopWatch.Stop();
        Logger.LogInformation("Finished loading tokens. Time: {elapsedTime}", stopWatch.Elapsed);
        Tokens = tokens;
    }

    // Marks the tokenization step complete so subsequent runs can skip it.
    CreateCookie(cookiePath);
}