def insert_data()

in evalbench/databases/bigquery.py [0:0]


    def insert_data(self, data: dict[str, List[List[str]]], setup: Optional[List[str]] = None):
        """Builds an INSERT statement for every row in `data` and executes them as a batch.

        `data` maps table names to rows; each row is a list of values already
        rendered as SQL literals (string values quoted, e.g. "'1'").
        """
        if not data:
            return
        # Map each table's column names to their declared types from the setup statements.
        schema_mapping = self._get_column_name_to_type_mapping(setup)
        insertion_statements = []

        for table_name in data:
            column_names = list(schema_mapping[table_name].keys())
            for row in data[table_name]:
                formatted_values = []

                # Format each value according to the declared type of its column.
                for index, value in enumerate(row):
                    col_name = column_names[index]
                    col_type = schema_mapping[table_name][col_name].upper()

                    if col_type == 'BOOL':
                        # Values are pre-quoted SQL literals; map "'1'"/"'0'" to BigQuery booleans.
                        if value == "'1'":
                            formatted_values.append("TRUE")
                        elif value == "'0'":
                            formatted_values.append("FALSE")
                        else:
                            formatted_values.append(value)
                    elif self._is_float(value):
                        # Numeric values can be inlined unchanged.
                        formatted_values.append(value)
                    elif col_type == 'JSON':
                        # Wrap the string literal in PARSE_JSON() so it lands in the JSON column.
                        formatted_values.append(f"PARSE_JSON({value})")
                    else:
                        # Convert SQL-style doubled single quotes to BigQuery's backslash escaping.
                        escaped_value = value.replace("''", "\\'")
                        formatted_values.append(escaped_value)

                inline_columns = ", ".join(formatted_values)
                insertion_statements.append(
                    f"INSERT INTO `{self.project_id}.{self.db_name}.{table_name}` VALUES ({inline_columns});"
                )
        try:
            self.batch_execute(insertion_statements)
        except RuntimeError as error:
            raise RuntimeError(f"Could not insert data into database: {error}") from error