part_generator.py [237:286]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
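            # shift so the 512x512 canvas center is the origin, rotate, translate, scale, then shift back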
            affine_item = np.array(item) - 256.
            affine_item = np.transpose(np.matmul(rotate_mat, np.transpose(affine_item)))
            affine_item[:, 0] += translate_x
            affine_item[:, 1] += translate_y
            affine_item *= scale
            affine_data.append(affine_item + 256.)
        return affine_data

    def processed_part_to_raster(self, vector_part, side=64, line_diameter=16, padding=16, bg_color=(0,0,0), fg_color=(1,1,1)):
        """
        render a side x side raster image from the processed vector part
        """
        original_side = 512.
        surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, side, side)
        ctx = cairo.Context(surface)
        ctx.set_antialias(cairo.ANTIALIAS_BEST)
        ctx.set_line_cap(cairo.LINE_CAP_ROUND)
        ctx.set_line_join(cairo.LINE_JOIN_ROUND)
        ctx.set_line_width(line_diameter)
        # scale to match the new size
        # add padding at the edges for the line_diameter
        # and add additional padding to account for antialiasing
        total_padding = padding * 2. + line_diameter
        new_scale = float(side) / float(original_side + total_padding)
        ctx.scale(new_scale, new_scale)
        ctx.translate(total_padding / 2., total_padding / 2.)
        # clear background
        ctx.set_source_rgb(*bg_color)
        ctx.paint()
        # draw strokes, this is the most cpu-intensive part
        ctx.set_source_rgb(*fg_color)
        for stroke in vector_part:
            if len(stroke) == 0:
                continue
            ctx.move_to(stroke[0][0], stroke[0][1])
            for x, y in stroke:
                ctx.line_to(x, y)
            ctx.stroke()
        surface_data = surface.get_data()
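        # ARGB32 packs 4 bytes per pixel; take every 4th byte as a single grayscale channel (strokes use a uniform color)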
        raster_image = np.copy(np.asarray(surface_data))[::4].reshape(side, side)
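        # normalize to [0, 1] and add a leading channel dimension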
        return torch.FloatTensor(raster_image/255.)[None, :, :]

# exponential moving average helpers

def ema_inplace(moving_avg, new, decay):
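    # in-place update: moving_avg <- decay * moving_avg + (1 - decay) * new; an empty buffer is initialized by copying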
    if is_empty(moving_avg):
        moving_avg.data.copy_(new)
        return
    moving_avg.data.mul_(decay).add_(new, alpha=1 - decay)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
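
A minimal usage sketch for the two helpers above. The names here are illustrative and not from the excerpt: `gen` stands for an instance of the enclosing class (not shown), `part` is toy stroke data on the original 512x512 canvas, and ema_inplace relies on the module-level is_empty helper.

import numpy as np
import torch

part = [np.array([[100., 100.], [200., 150.], [300., 300.]])]  # one stroke with three points
raster = gen.processed_part_to_raster(part, side=64)           # FloatTensor of shape (1, 64, 64)

cluster_size = torch.zeros(512)                        # e.g. an EMA buffer of running statistics
ema_inplace(cluster_size, torch.ones(512), decay=0.8)  # blend the buffer toward the new value in place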



part_selector.py [175:224]:
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
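            # shift so the 512x512 canvas center is the origin, rotate, translate, scale, then shift back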
            affine_item = np.array(item) - 256.
            affine_item = np.transpose(np.matmul(rotate_mat, np.transpose(affine_item)))
            affine_item[:, 0] += translate_x
            affine_item[:, 1] += translate_y
            affine_item *= scale
            affine_data.append(affine_item + 256.)
        return affine_data

    def processed_part_to_raster(self, vector_part, side=64, line_diameter=16, padding=16, bg_color=(0,0,0), fg_color=(1,1,1)):
        """
        render a side x side raster image from the processed vector part
        """
        original_side = 512.
        surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, side, side)
        ctx = cairo.Context(surface)
        ctx.set_antialias(cairo.ANTIALIAS_BEST)
        ctx.set_line_cap(cairo.LINE_CAP_ROUND)
        ctx.set_line_join(cairo.LINE_JOIN_ROUND)
        ctx.set_line_width(line_diameter)
        # scale to match the new size
        # add padding at the edges for the line_diameter
        # and add additional padding to account for antialiasing
        total_padding = padding * 2. + line_diameter
        new_scale = float(side) / float(original_side + total_padding)
        ctx.scale(new_scale, new_scale)
        ctx.translate(total_padding / 2., total_padding / 2.)
        # clear background
        ctx.set_source_rgb(*bg_color)
        ctx.paint()
        # draw strokes, this is the most cpu-intensive part
        ctx.set_source_rgb(*fg_color)
        for stroke in vector_part:
            if len(stroke) == 0:
                continue
            ctx.move_to(stroke[0][0], stroke[0][1])
            for x, y in stroke:
                ctx.line_to(x, y)
            ctx.stroke()
        surface_data = surface.get_data()
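        # ARGB32 packs 4 bytes per pixel; take every 4th byte as a single grayscale channel (strokes use a uniform color)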
        raster_image = np.copy(np.asarray(surface_data))[::4].reshape(side, side)
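        # normalize to [0, 1] and add a leading channel dimension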
        return torch.FloatTensor(raster_image/255.)[None, :, :]

# exponential moving average helpers

def ema_inplace(moving_avg, new, decay):
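    # in-place update: moving_avg <- decay * moving_avg + (1 - decay) * new; an empty buffer is initialized by copying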
    if is_empty(moving_avg):
        moving_avg.data.copy_(new)
        return
    moving_avg.data.mul_(decay).add_(new, alpha=1 - decay)
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -



