in packages/core/src/codewhisperer/service/transformByQ/transformApiHandler.ts [143:338]
await sleep(1000 * Math.pow(2, i))
}
}
}
if (!response || response.status !== 200) {
const uploadFailedError = `Upload failed after up to 4 attempts with status code = ${response?.status ?? 'unavailable'}`
getLogger().error(`CodeTransformation: ${uploadFailedError}`)
throw new Error(uploadFailedError)
}
getLogger().info('CodeTransformation: Upload to S3 succeeded')
} catch (e: any) {
let errorMessage = `The upload failed due to: ${(e as Error).message}. For more information, see the [Amazon Q documentation](${CodeWhispererConstants.codeTransformTroubleshootUploadError})`
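// map known failure signatures to user-friendly messages defined in CodeWhispererConstants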
if (errorMessage.includes('Request has expired')) {
errorMessage = CodeWhispererConstants.errorUploadingWithExpiredUrl
} else if (errorMessage.includes('Failed to establish a socket connection')) {
errorMessage = CodeWhispererConstants.socketConnectionFailed
} else if (errorMessage.includes('self signed certificate in certificate chain')) {
errorMessage = CodeWhispererConstants.selfSignedCertificateError
}
getLogger().error(`CodeTransformation: UploadZip error = ${e}`)
throw new Error(errorMessage)
}
}
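/**
 * Resumes a transformation job that is paused waiting on a user action and returns the
 * updated transformation status. userActionStatus can be "COMPLETED" or "REJECTED".
 */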
export async function resumeTransformationJob(jobId: string, userActionStatus: TransformationUserActionStatus) {
try {
const response = await codeWhisperer.codeWhispererClient.codeModernizerResumeTransformation({
transformationJobId: jobId,
userActionStatus, // can be "COMPLETED" or "REJECTED"
})
getLogger().info(
`CodeTransformation: resumeTransformation API status code = ${response.$response.httpResponse.statusCode}`
)
return response.transformationStatus
} catch (e: any) {
const errorMessage = `Resuming the job failed due to: ${(e as Error).message}`
getLogger().error(`CodeTransformation: ResumeTransformation error = %O`, e)
throw new Error(errorMessage)
}
}
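/**
 * Stops the given transformation job via the StopTransformation API. A missing jobId is a no-op;
 * on failure, the request ID is recorded as job failure metadata before rethrowing.
 */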
export async function stopJob(jobId: string) {
if (!jobId) {
return
}
try {
await codeWhisperer.codeWhispererClient.codeModernizerStopCodeTransformation({
transformationJobId: jobId,
})
} catch (e: any) {
transformByQState.setJobFailureMetadata(` (request ID: ${e.requestId ?? 'unavailable'})`)
getLogger().error(`CodeTransformation: StopTransformation error = %O`, e)
throw new Error('Stop job failed')
}
}
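/**
 * Creates a pre-signed upload URL for the payload zip, uploads the artifact to S3, and updates
 * job state and step progress. For the initial upload (no uploadContext), the returned uploadId
 * also serves as the job ID.
 */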
export async function uploadPayload(
payloadFileName: string,
profile: RegionProfile | undefined,
uploadContext?: UploadContext
) {
const buffer = Buffer.from(await fs.readFileBytes(payloadFileName))
const sha256 = getSha256(buffer)
throwIfCancelled()
let response = undefined
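// request a pre-signed S3 upload URL (and uploadId) for the payload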
try {
response = await codeWhisperer.codeWhispererClient.createUploadUrl({
contentChecksum: sha256,
contentChecksumType: CodeWhispererConstants.contentChecksumType,
uploadIntent: CodeWhispererConstants.uploadIntent,
uploadContext,
profileArn: profile?.arn,
})
} catch (e: any) {
const errorMessage = `Creating the upload URL failed due to: ${(e as Error).message}`
transformByQState.setJobFailureMetadata(` (request ID: ${e.requestId ?? 'unavailable'})`)
getLogger().error(`CodeTransformation: CreateUploadUrl error = %O`, e)
throw new Error(errorMessage)
}
getLogger().info('CodeTransformation: created upload URL successfully')
try {
await uploadArtifactToS3(payloadFileName, response, sha256, buffer)
} catch (e: any) {
const errorMessage = (e as Error).message
getLogger().error(`CodeTransformation: UploadArtifactToS3 error = ${errorMessage}`)
throw new Error(errorMessage)
}
// UploadContext only exists for subsequent uploads, and those return an uploadId that is NOT
// the jobId. Only for the initial upload is the uploadId also the jobId.
if (!uploadContext) {
transformByQState.setJobId(encodeHTML(response.uploadId))
}
jobPlanProgress['uploadCode'] = StepProgress.Succeeded
if (transformByQState.getTransformationType() === TransformationType.SQL_CONVERSION) {
// if doing a SQL conversion, we don't build the code or generate a plan, so mark these steps as succeeded immediately so that the next step renders
jobPlanProgress['buildCode'] = StepProgress.Succeeded
jobPlanProgress['generatePlan'] = StepProgress.Succeeded
}
updateJobHistory()
return response.uploadId
}
/**
* Array of file extensions used by Maven as metadata in the local repository.
* Files with these extensions influence Maven's behavior during compile time,
* particularly in checking the availability of source repositories and potentially
* re-downloading dependencies if the source is not accessible. Removing these
* files can prevent Maven from attempting to download dependencies again.
*/
const mavenExcludedExtensions = ['.repositories', '.sha1']
// exclude .DS_Store (not relevant) and Maven executables (can cause permissions issues when building if the user has not run 'chmod')
const sourceExcludedExtensions = ['.DS_Store', 'mvnw', 'mvnw.cmd']
/**
* Determines if the specified file path corresponds to a Maven metadata file
* by checking against known metadata file extensions. This is used to identify
* files that might trigger Maven to recheck or redownload dependencies from source repositories.
*
* @param path The file path to evaluate for exclusion based on its extension.
* @returns {boolean} Returns true if the path ends with an extension associated with Maven metadata files; otherwise, false.
*/
function isExcludedDependencyFile(path: string): boolean {
return mavenExcludedExtensions.some((extension) => path.endsWith(extension))
}
// do not zip the .DS_Store file as it may appear in the diff.patch
function isExcludedSourceFile(path: string): boolean {
return sourceExcludedExtensions.some((extension) => path.endsWith(extension))
}
// zip all dependency files and all source files excluding "target" (contains large JARs) plus ".git" and ".idea" (may appear in diff.patch)
export function getFilesRecursively(dir: string, isDependenciesFolder: boolean): string[] {
const entries = nodefs.readdirSync(dir, { withFileTypes: true })
const files = entries.flatMap((entry) => {
const res = path.resolve(dir, entry.name)
if (entry.isDirectory()) {
if (isDependenciesFolder) {
// include all dependency files
return getFilesRecursively(res, isDependenciesFolder)
} else if (entry.name !== 'target' && entry.name !== '.git' && entry.name !== '.idea') {
// exclude the above directories when zipping source code
return getFilesRecursively(res, isDependenciesFolder)
} else {
return []
}
} else {
return [res]
}
})
return files
}
interface IZipManifestParams {
hilZipParams?: IHilZipManifestParams
}
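// returns a HilZipManifest when human-in-the-loop parameters are provided, otherwise a default ZipManifest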
export function createZipManifest({ hilZipParams }: IZipManifestParams) {
const zipManifest = hilZipParams ? new HilZipManifest(hilZipParams) : new ZipManifest()
return zipManifest
}
interface IZipCodeParams {
dependenciesFolder?: FolderInfo
humanInTheLoopFlag?: boolean
projectPath?: string
zipManifest: ZipManifest | HilZipManifest
}
interface ZipCodeResult {
dependenciesCopied: boolean
tempFilePath: string
fileSize: number
}
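/**
 * Builds the payload zip for upload. When projectPath is provided, source files are collected via
 * getFilesRecursively; dependency and human-in-the-loop artifacts are included based on
 * dependenciesFolder, humanInTheLoopFlag, and the provided manifest. Returns the temp zip path,
 * its size, and whether dependencies were copied.
 */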
export async function zipCode(
{ dependenciesFolder, humanInTheLoopFlag, projectPath, zipManifest }: IZipCodeParams,
zip: AdmZip = new AdmZip()
) {
let tempFilePath = undefined
let logFilePath = undefined
let dependenciesCopied = false
try {
throwIfCancelled()
// if no projectPath is passed in, we are not uploading the source folder;
// we only upload dependencies for human-in-the-loop work
if (projectPath) {
const sourceFiles = getFilesRecursively(projectPath, false)
let sourceFilesSize = 0
for (const file of sourceFiles) {
if (nodefs.statSync(file).isDirectory() || isExcludedSourceFile(file)) {