func ReadArrowTableToRows()

in spark/sql/types/arrow.go [32:69]


func ReadArrowTableToRows(table arrow.Table) ([]Row, error) {
	result := make([]Row, table.NumRows())

	// Read each column of the table and convert its chunked data into a flat slice of Go values ([]any).
	cols := make([][]any, table.NumCols())
	for i := 0; i < int(table.NumCols()); i++ {
		chunkedColumn := table.Column(i).Data()
		column, err := readChunkedColumn(chunkedColumn)
		if err != nil {
			return nil, err
		}
		cols[i] = column
	}

	// Collect the field names so each row can map column names to value offsets.
	fieldNames := make([]string, table.NumCols())
	for i, field := range table.Schema().Fields() {
		fieldNames[i] = field.Name
	}

	// Transpose the column-major data into rows:
	for i := 0; i < int(table.NumRows()); i++ {
		row := make([]any, table.NumCols())
		for j := 0; j < int(table.NumCols()); j++ {
			row[j] = cols[j][i]
		}
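		// Wrap the values in a rowImpl and record the column-name -> index
		// offsets so values can later be looked up by field name.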
		r := &rowImpl{
			values:  row,
			offsets: make(map[string]int),
		}
		for j, fieldName := range fieldNames {
			r.offsets[fieldName] = j
		}
		result[i] = r
	}

	return result, nil
}
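
A minimal usage sketch, written as a test in the same package so the unexported rowImpl and readChunkedColumn stay out of sight. It assumes the Apache Arrow Go module this package already imports; the v17 import paths below are illustrative, and the exact Row accessor methods are not relied on.

package types

import (
	"testing"

	"github.com/apache/arrow/go/v17/arrow"
	"github.com/apache/arrow/go/v17/arrow/array"
	"github.com/apache/arrow/go/v17/arrow/memory"
)

// TestReadArrowTableToRows_sketch builds a small two-column Arrow table in
// memory and checks that it converts into the expected number of rows.
func TestReadArrowTableToRows_sketch(t *testing.T) {
	pool := memory.NewGoAllocator()
	schema := arrow.NewSchema([]arrow.Field{
		{Name: "id", Type: arrow.PrimitiveTypes.Int64},
		{Name: "name", Type: arrow.BinaryTypes.String},
	}, nil)

	// Build one record batch with three rows.
	rb := array.NewRecordBuilder(pool, schema)
	defer rb.Release()
	rb.Field(0).(*array.Int64Builder).AppendValues([]int64{1, 2, 3}, nil)
	rb.Field(1).(*array.StringBuilder).AppendValues([]string{"a", "b", "c"}, nil)

	rec := rb.NewRecord()
	defer rec.Release()

	// Wrap the record batch in a table and convert it to rows.
	tbl := array.NewTableFromRecords(schema, []arrow.Record{rec})
	defer tbl.Release()

	rows, err := ReadArrowTableToRows(tbl)
	if err != nil {
		t.Fatal(err)
	}
	if len(rows) != 3 {
		t.Fatalf("expected 3 rows, got %d", len(rows))
	}
}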