in crypto/fipsmodule/aes/asm/aesp8-ppc.pl [3191:3612] (AES-XTS decryption, 6x interleaved path)
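
Six ciphertext blocks are whitened with consecutive tweaks and decrypted together, while the next six tweaks are derived in the latency shadow of the vncipher rounds. As a reference for the tweak arithmetic, here is a minimal C sketch (hypothetical helper name; IEEE P1619 little-endian byte order) of what each vsrab/vaddubm/vsldoi/vand/vxor cluster below computes:

#include <stdint.h>

/* Multiply an XTS tweak by x in GF(2^128) mod x^128+x^7+x^2+x+1;
 * t[0] is the least significant byte. Illustrative only: the vector
 * code below does this for all sixteen bytes at once. */
static void xts_next_tweak(uint8_t t[16])
{
    uint8_t carry = 0;
    for (int i = 0; i < 16; i++) {
        uint8_t msb = t[i] >> 7;            /* bit shifted out of byte i */
        t[i] = (uint8_t)((t[i] << 1) | carry);
        carry = msb;
    }
    if (carry)
        t[0] ^= 0x87;                       /* fold the wrapped bit back in */
}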


	 sub		$len,$len,$taillen
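	# Each cluster like the one below derives the next tweak, i.e.
	# multiplies the running tweak by x in GF(2^128):
	#   vsrab   turns each byte's top bit into a 0x00/0xFF carry mask,
	#   vaddubm doubles every byte (the top bits fall off),
	#   vsldoi  rotates each carry mask to the byte that consumes it,
	#   vand    with $eighty7 keeps 0x01 for ordinary carries and 0x87
	#           for the top bit that wraps around as the reduction,
	#   vxor    folds the carries back in.
	# The cluster is interleaved with the whitening so it runs for free.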
	vxor		$twk3,$tweak,$rndkey0
	vsrab		$tmp,$tweak,$seven	# next tweak value
	vaddubm		$tweak,$tweak,$tweak
	vsldoi		$tmp,$tmp,$tmp,15
	 le?vperm	$in3,$in3,$in3,$leperm
	vand		$tmp,$tmp,$eighty7
	 vxor		$out3,$in3,$twk3
	vxor		$tweak,$tweak,$tmp

	 lvx_u		$in4,$x40,$inp
	 subi		$len,$len,0x60
	vxor		$twk4,$tweak,$rndkey0
	vsrab		$tmp,$tweak,$seven	# next tweak value
	vaddubm		$tweak,$tweak,$tweak
	vsldoi		$tmp,$tmp,$tmp,15
	 le?vperm	$in4,$in4,$in4,$leperm
	vand		$tmp,$tmp,$eighty7
	 vxor		$out4,$in4,$twk4
	vxor		$tweak,$tweak,$tmp

	 lvx_u		$in5,$x50,$inp
	 addi		$inp,$inp,0x60
	vxor		$twk5,$tweak,$rndkey0
	vsrab		$tmp,$tweak,$seven	# next tweak value
	vaddubm		$tweak,$tweak,$tweak
	vsldoi		$tmp,$tmp,$tmp,15
	 le?vperm	$in5,$in5,$in5,$leperm
	vand		$tmp,$tmp,$eighty7
	 vxor		$out5,$in5,$twk5
	vxor		$tweak,$tweak,$tmp

	vxor		v31,v31,$rndkey0
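	# Fold the round-0 key into the last round key once: every $twkX
	# above is tweak^$rndkey0, so the $twkX^v31 values used for the
	# final rounds collapse to tweak^(true last round key).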
	mtctr		$rounds
	b		Loop_xts_dec6x
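	# Loop_xts_dec6x applies two rounds per iteration to all six
	# blocks, streaming round keys through v24/v25 from the stack
	# copy; the last rounds are peeled below so that tweak derivation,
	# input loads and byte swaps can hide in their latency.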

.align	5
Loop_xts_dec6x:
	vncipher	$out0,$out0,v24
	vncipher	$out1,$out1,v24
	vncipher	$out2,$out2,v24
	vncipher	$out3,$out3,v24
	vncipher	$out4,$out4,v24
	vncipher	$out5,$out5,v24
	lvx		v24,$x20,$key_		# round[3]
	addi		$key_,$key_,0x20

	vncipher	$out0,$out0,v25
	vncipher	$out1,$out1,v25
	vncipher	$out2,$out2,v25
	vncipher	$out3,$out3,v25
	vncipher	$out4,$out4,v25
	vncipher	$out5,$out5,v25
	lvx		v25,$x10,$key_		# round[4]
	bdnz		Loop_xts_dec6x

	subic		$len,$len,96		# $len-=96
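	# Borrow trick: subic sets CA unless $len-96 borrows, subfe. turns
	# that into r0 = 0 (no borrow) or -1 (borrow), and the "and" below
	# leaves 0 or the negative deficit for the "add $inp,$inp,r0".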
	 vxor		$in0,$twk0,v31		# xor with last round key
	vncipher	$out0,$out0,v24
	vncipher	$out1,$out1,v24
	 vsrab		$tmp,$tweak,$seven	# next tweak value
	 vxor		$twk0,$tweak,$rndkey0
	 vaddubm	$tweak,$tweak,$tweak
	vncipher	$out2,$out2,v24
	vncipher	$out3,$out3,v24
	 vsldoi		$tmp,$tmp,$tmp,15
	vncipher	$out4,$out4,v24
	vncipher	$out5,$out5,v24

	subfe.		r0,r0,r0		# borrow?-1:0
	 vand		$tmp,$tmp,$eighty7
	vncipher	$out0,$out0,v25
	vncipher	$out1,$out1,v25
	 vxor		$tweak,$tweak,$tmp
	vncipher	$out2,$out2,v25
	vncipher	$out3,$out3,v25
	 vxor		$in1,$twk1,v31
	 vsrab		$tmp,$tweak,$seven	# next tweak value
	 vxor		$twk1,$tweak,$rndkey0
	vncipher	$out4,$out4,v25
	vncipher	$out5,$out5,v25

	and		r0,r0,$len
	 vaddubm	$tweak,$tweak,$tweak
	 vsldoi		$tmp,$tmp,$tmp,15
	vncipher	$out0,$out0,v26
	vncipher	$out1,$out1,v26
	 vand		$tmp,$tmp,$eighty7
	vncipher	$out2,$out2,v26
	vncipher	$out3,$out3,v26
	 vxor		$tweak,$tweak,$tmp
	vncipher	$out4,$out4,v26
	vncipher	$out5,$out5,v26

	add		$inp,$inp,r0		# $inp is adjusted in such
						# way that at exit from the
						# loop inX-in5 are loaded
						# with last "words"
	 vxor		$in2,$twk2,v31
	 vsrab		$tmp,$tweak,$seven	# next tweak value
	 vxor		$twk2,$tweak,$rndkey0
	 vaddubm	$tweak,$tweak,$tweak
	vncipher	$out0,$out0,v27
	vncipher	$out1,$out1,v27
	 vsldoi		$tmp,$tmp,$tmp,15
	vncipher	$out2,$out2,v27
	vncipher	$out3,$out3,v27
	 vand		$tmp,$tmp,$eighty7
	vncipher	$out4,$out4,v27
	vncipher	$out5,$out5,v27

	addi		$key_,$sp,`$FRAME+15`	# rewind $key_
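	# Rewind $key_ to the round-key schedule copied above the stack
	# frame so v24/v25 restart at round[1]/round[2]; lvx ignores the
	# low four address bits, so the +15 bias is harmless.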
	 vxor		$tweak,$tweak,$tmp
	vncipher	$out0,$out0,v28
	vncipher	$out1,$out1,v28
	 vxor		$in3,$twk3,v31
	 vsrab		$tmp,$tweak,$seven	# next tweak value
	 vxor		$twk3,$tweak,$rndkey0
	vncipher	$out2,$out2,v28
	vncipher	$out3,$out3,v28
	 vaddubm	$tweak,$tweak,$tweak
	 vsldoi		$tmp,$tmp,$tmp,15
	vncipher	$out4,$out4,v28
	vncipher	$out5,$out5,v28
	lvx		v24,$x00,$key_		# re-pre-load round[1]
	 vand		$tmp,$tmp,$eighty7

	vncipher	$out0,$out0,v29
	vncipher	$out1,$out1,v29
	 vxor		$tweak,$tweak,$tmp
	vncipher	$out2,$out2,v29
	vncipher	$out3,$out3,v29
	 vxor		$in4,$twk4,v31
	 vsrab		$tmp,$tweak,$seven	# next tweak value
	 vxor		$twk4,$tweak,$rndkey0
	vncipher	$out4,$out4,v29
	vncipher	$out5,$out5,v29
	lvx		v25,$x10,$key_		# re-pre-load round[2]
	 vaddubm	$tweak,$tweak,$tweak
	 vsldoi		$tmp,$tmp,$tmp,15

	vncipher	$out0,$out0,v30
	vncipher	$out1,$out1,v30
	 vand		$tmp,$tmp,$eighty7
	vncipher	$out2,$out2,v30
	vncipher	$out3,$out3,v30
	 vxor		$tweak,$tweak,$tmp
	vncipher	$out4,$out4,v30
	vncipher	$out5,$out5,v30
	 vxor		$in5,$twk5,v31
	 vsrab		$tmp,$tweak,$seven	# next tweak value
	 vxor		$twk5,$tweak,$rndkey0
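	# Final round: each $inX is $twkX^v31 = tweak^(last round key), so
	# vncipherlast merges AddRoundKey with the XTS post-whitening,
	# while the next six ciphertext blocks stream in underneath.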

	vncipherlast	$out0,$out0,$in0
	 lvx_u		$in0,$x00,$inp		# load next input block
	 vaddubm	$tweak,$tweak,$tweak
	 vsldoi		$tmp,$tmp,$tmp,15
	vncipherlast	$out1,$out1,$in1
	 lvx_u		$in1,$x10,$inp
	vncipherlast	$out2,$out2,$in2
	 le?vperm	$in0,$in0,$in0,$leperm
	 lvx_u		$in2,$x20,$inp
	 vand		$tmp,$tmp,$eighty7
	vncipherlast	$out3,$out3,$in3
	 le?vperm	$in1,$in1,$in1,$leperm
	 lvx_u		$in3,$x30,$inp
	vncipherlast	$out4,$out4,$in4
	 le?vperm	$in2,$in2,$in2,$leperm
	 lvx_u		$in4,$x40,$inp
	 vxor		$tweak,$tweak,$tmp
	vncipherlast	$out5,$out5,$in5
	 le?vperm	$in3,$in3,$in3,$leperm
	 lvx_u		$in5,$x50,$inp
	 addi		$inp,$inp,0x60
	 le?vperm	$in4,$in4,$in4,$leperm
	 le?vperm	$in5,$in5,$in5,$leperm

	le?vperm	$out0,$out0,$out0,$leperm
	le?vperm	$out1,$out1,$out1,$leperm
	stvx_u		$out0,$x00,$out		# store output
	 vxor		$out0,$in0,$twk0
	le?vperm	$out2,$out2,$out2,$leperm
	stvx_u		$out1,$x10,$out
	 vxor		$out1,$in1,$twk1
	le?vperm	$out3,$out3,$out3,$leperm
	stvx_u		$out2,$x20,$out
	 vxor		$out2,$in2,$twk2
	le?vperm	$out4,$out4,$out4,$leperm
	stvx_u		$out3,$x30,$out
	 vxor		$out3,$in3,$twk3
	le?vperm	$out5,$out5,$out5,$leperm
	stvx_u		$out4,$x40,$out
	 vxor		$out4,$in4,$twk4
	stvx_u		$out5,$x50,$out
	 vxor		$out5,$in5,$twk5
	addi		$out,$out,0x60

	mtctr		$rounds
	beq		Loop_xts_dec6x		# did $len-=96 borrow?
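	# No borrow means at least 96 bytes remain: go around again.
	# Otherwise restore $len to the 0..80 bytes of whole blocks still
	# owed (the tail, and one held-back block when a tail exists, are
	# accounted separately) and dispatch on the count.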

	addic.		$len,$len,0x60
	beq		Lxts_dec6x_zero
	cmpwi		$len,0x20
	blt		Lxts_dec6x_one
	nop
	beq		Lxts_dec6x_two
	cmpwi		$len,0x40
	blt		Lxts_dec6x_three
	nop
	beq		Lxts_dec6x_four
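	# With N<6 blocks left they sit in $in(6-N)..$in5, paired with
	# twk0..twk(N-1). Each handler zeroes the unused $out slots and
	# runs the rounds via _aesp8_xts_dec5x; afterwards it parks the
	# tweaks the stealing path needs: $twk0 for the held-back block's
	# own position and $twk1 for the final partial position. The bne
	# after each handler tests a $taillen comparison made inside
	# _aesp8_xts_dec5x (not shown here).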

Lxts_dec6x_five:
	vxor		$out0,$in1,$twk0
	vxor		$out1,$in2,$twk1
	vxor		$out2,$in3,$twk2
	vxor		$out3,$in4,$twk3
	vxor		$out4,$in5,$twk4

	bl		_aesp8_xts_dec5x

	le?vperm	$out0,$out0,$out0,$leperm
	vmr		$twk0,$twk5		# unused tweak
	vxor		$twk1,$tweak,$rndkey0
	le?vperm	$out1,$out1,$out1,$leperm
	stvx_u		$out0,$x00,$out		# store output
	vxor		$out0,$in0,$twk1
	le?vperm	$out2,$out2,$out2,$leperm
	stvx_u		$out1,$x10,$out
	le?vperm	$out3,$out3,$out3,$leperm
	stvx_u		$out2,$x20,$out
	le?vperm	$out4,$out4,$out4,$leperm
	stvx_u		$out3,$x30,$out
	stvx_u		$out4,$x40,$out
	addi		$out,$out,0x50
	bne		Lxts_dec6x_steal
	b		Lxts_dec6x_done

.align	4
Lxts_dec6x_four:
	vxor		$out0,$in2,$twk0
	vxor		$out1,$in3,$twk1
	vxor		$out2,$in4,$twk2
	vxor		$out3,$in5,$twk3
	vxor		$out4,$out4,$out4

	bl		_aesp8_xts_dec5x

	le?vperm	$out0,$out0,$out0,$leperm
	vmr		$twk0,$twk4		# unused tweak
	vmr		$twk1,$twk5
	le?vperm	$out1,$out1,$out1,$leperm
	stvx_u		$out0,$x00,$out		# store output
	vxor		$out0,$in0,$twk5
	le?vperm	$out2,$out2,$out2,$leperm
	stvx_u		$out1,$x10,$out
	le?vperm	$out3,$out3,$out3,$leperm
	stvx_u		$out2,$x20,$out
	stvx_u		$out3,$x30,$out
	addi		$out,$out,0x40
	bne		Lxts_dec6x_steal
	b		Lxts_dec6x_done

.align	4
Lxts_dec6x_three:
	vxor		$out0,$in3,$twk0
	vxor		$out1,$in4,$twk1
	vxor		$out2,$in5,$twk2
	vxor		$out3,$out3,$out3
	vxor		$out4,$out4,$out4

	bl		_aesp8_xts_dec5x

	le?vperm	$out0,$out0,$out0,$leperm
	vmr		$twk0,$twk3		# unused tweak
	vmr		$twk1,$twk4
	le?vperm	$out1,$out1,$out1,$leperm
	stvx_u		$out0,$x00,$out		# store output
	vxor		$out0,$in0,$twk4
	le?vperm	$out2,$out2,$out2,$leperm
	stvx_u		$out1,$x10,$out
	stvx_u		$out2,$x20,$out
	addi		$out,$out,0x30
	bne		Lxts_dec6x_steal
	b		Lxts_dec6x_done

.align	4
Lxts_dec6x_two:
	vxor		$out0,$in4,$twk0
	vxor		$out1,$in5,$twk1
	vxor		$out2,$out2,$out2
	vxor		$out3,$out3,$out3
	vxor		$out4,$out4,$out4

	bl		_aesp8_xts_dec5x

	le?vperm	$out0,$out0,$out0,$leperm
	vmr		$twk0,$twk2		# unused tweak
	vmr		$twk1,$twk3
	le?vperm	$out1,$out1,$out1,$leperm
	stvx_u		$out0,$x00,$out		# store output
	vxor		$out0,$in0,$twk3
	stvx_u		$out1,$x10,$out
	addi		$out,$out,0x20
	bne		Lxts_dec6x_steal
	b		Lxts_dec6x_done

.align	4
Lxts_dec6x_one:
	vxor		$out0,$in5,$twk0
	nop
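	# Single-block round loop; the stealing code below re-enters it
	# for the spliced final block.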
Loop_xts_dec1x:
	vncipher	$out0,$out0,v24
	lvx		v24,$x20,$key_		# round[3]
	addi		$key_,$key_,0x20

	vncipher	$out0,$out0,v25
	lvx		v25,$x10,$key_		# round[4]
	bdnz		Loop_xts_dec1x
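	# r0 = ($taillen-1)&16 is 16 only when $taillen==0: back $inp up
	# one block in that case so the lvx_u below re-reads bytes we own
	# instead of running past the input; the block is only consumed
	# when a tail exists. The cmpwi feeds the bne after the store.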

	subi		r0,$taillen,1
	vncipher	$out0,$out0,v24

	andi.		r0,r0,16
	cmpwi		$taillen,0
	vncipher	$out0,$out0,v25

	sub		$inp,$inp,r0
	vncipher	$out0,$out0,v26

	lvx_u		$in0,0,$inp
	vncipher	$out0,$out0,v27

	addi		$key_,$sp,`$FRAME+15`	# rewind $key_
	vncipher	$out0,$out0,v28
	lvx		v24,$x00,$key_		# re-pre-load round[1]

	vncipher	$out0,$out0,v29
	lvx		v25,$x10,$key_		# re-pre-load round[2]
	 vxor		$twk0,$twk0,v31

	le?vperm	$in0,$in0,$in0,$leperm
	vncipher	$out0,$out0,v30

	mtctr		$rounds
	vncipherlast	$out0,$out0,$twk0

	vmr		$twk0,$twk1		# unused tweak
	vmr		$twk1,$twk2
	le?vperm	$out0,$out0,$out0,$leperm
	stvx_u		$out0,$x00,$out		# store output
	addi		$out,$out,0x10
	vxor		$out0,$in0,$twk2
	bne		Lxts_dec6x_steal
	b		Lxts_dec6x_done

.align	4
Lxts_dec6x_zero:
	cmpwi		$taillen,0
	beq		Lxts_dec6x_done
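	# A tail remains: the block at $inp is the last full ciphertext
	# block, held back for stealing; whiten it with $twk1, the tweak
	# of the partial block's position.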

	lvx_u		$in0,0,$inp
	le?vperm	$in0,$in0,$in0,$leperm
	vxor		$out0,$in0,$twk1
Lxts_dec6x_steal:
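	# Ciphertext stealing decrypts in reverse order: the held-back
	# full block C(m-1) goes through the rounds under the *final*
	# position's tweak ($twk1), is stored, and the spliced block is
	# then decrypted under $twk0 by one more Loop_xts_dec1x pass.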
	vncipher	$out0,$out0,v24
	lvx		v24,$x20,$key_		# round[3]
	addi		$key_,$key_,0x20

	vncipher	$out0,$out0,v25
	lvx		v25,$x10,$key_		# round[4]
	bdnz		Lxts_dec6x_steal

	add		$inp,$inp,$taillen
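	# $inp still points at the held-back block, so after adding
	# $taillen the 16-byte load reaches exactly to the last input
	# byte; the lvsr mask below then rotates the $taillen tail bytes
	# to the front of $in0.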
	vncipher	$out0,$out0,v24

	cmpwi		$taillen,0
	vncipher	$out0,$out0,v25

	lvx_u		$in0,0,$inp
	vncipher	$out0,$out0,v26

	lvsr		$inpperm,0,$taillen	# $in5 is no more
	vncipher	$out0,$out0,v27

	addi		$key_,$sp,`$FRAME+15`	# rewind $key_
	vncipher	$out0,$out0,v28
	lvx		v24,$x00,$key_		# re-pre-load round[1]

	vncipher	$out0,$out0,v29
	lvx		v25,$x10,$key_		# re-pre-load round[2]
	 vxor		$twk1,$twk1,v31

	le?vperm	$in0,$in0,$in0,$leperm
	vncipher	$out0,$out0,v30

	vperm		$in0,$in0,$in0,$inpperm
	vncipherlast	$tmp,$out0,$twk1

	le?vperm	$out0,$tmp,$tmp,$leperm
	le?stvx_u	$out0,0,$out
	be?stvx_u	$tmp,0,$out
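	# Splice: vperm of zeros and ones under $inpperm builds a mask
	# that takes the first $taillen bytes from $in0 (the stolen
	# ciphertext tail) and the remainder from $tmp (the block just
	# decrypted); vxor then pre-whitens the result with $twk0.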

	vxor		$out0,$out0,$out0
	vspltisb	$out1,-1
	vperm		$out0,$out0,$out1,$inpperm
	vsel		$out0,$in0,$tmp,$out0
	vxor		$out0,$out0,$twk0

	subi		r30,$out,1
	mtctr		$taillen
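	# Copy the first $taillen bytes of the block just stored at $out
	# sixteen bytes forward: they are P(m), the final partial
	# plaintext. The extra Loop_xts_dec1x pass then overwrites the
	# block at $out with P(m-1). lbzu pre-increments r30, so each
	# iteration reads $out[i] and writes $out[i+16].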
Loop_xts_dec6x_steal:
	lbzu		r0,1(r30)
	stb		r0,16(r30)
	bdnz		Loop_xts_dec6x_steal

	li		$taillen,0
	mtctr		$rounds
	b		Loop_xts_dec1x		# one more time...

.align	4
Lxts_dec6x_done:
	${UCMP}i	$ivp,0