flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-starrocks/src/main/java/org/apache/flink/cdc/connectors/starrocks/sink/StarRocksMetadataApplier.java (2 lines):
- line 308: // TODO StarRocks plans to support column rename since 3.3 which has not been released.
- line 315: // TODO There are limitations for data type conversions. We should know the data types

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mongodb-cdc/src/main/java/org/apache/flink/cdc/connectors/mongodb/source/reader/fetch/MongoDBFetchTaskContext.java (1 line):
- line 71: // TODO: replace getBatchSize with getSnapshotBatchSize

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oceanbase-cdc/src/main/java/org/apache/flink/cdc/connectors/oceanbase/source/OceanBaseRichSourceFunction.java (1 line):
- line 425: // TODO record ddl and remove expired table schema

flink-cdc-runtime/src/main/java/org/apache/flink/cdc/runtime/typeutils/DataTypeConverter.java (1 line):
- line 319: // TODO: Bump Calcite to support its TIMESTAMP_TZ type via #FLINK-37123

flink-cdc-composer/src/main/java/org/apache/flink/cdc/composer/flink/translator/DataSinkTranslator.java (1 line):
- line 179: // TODO: Hard coding checkpoint

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-maxcompute/src/main/java/org/apache/flink/cdc/connectors/maxcompute/coordinator/SessionManageOperator.java (1 line):
- line 83: /** TODO: a tricky way to get an Operator from sink. */

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka/src/main/java/org/apache/flink/cdc/connectors/kafka/utils/JsonRowDataSerializationSchemaUtils.java (1 line):
- line 33: * Utils for creating JsonRowDataSerializationSchema.TODO: Remove this class after bump to Flink

flink-cdc-common/src/main/java/org/apache/flink/cdc/common/utils/DateTimeUtils.java (1 line):
- line 98: // TODO use offset, better performance

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc/src/main/java/org/apache/flink/cdc/connectors/postgres/source/utils/CustomPostgresSchema.java (1 line):
- line 127: // TODO: check whether we always set isFromSnapshot = true

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mysql-cdc/src/main/java/org/apache/flink/cdc/connectors/mysql/source/assigners/state/PendingSplitsStateSerializer.java (1 line):
- line 47: // TODO: need proper implementation of the new version

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc/src/main/java/io/debezium/connector/postgresql/PostgresStreamingChangeEventSource.java (1 line):
- line 224: // TODO author=Horia Chiorean date=08/11/2016 description=Ideally we'd close the

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc/src/main/java/io/debezium/connector/postgresql/connection/PostgresReplicationConnection.java (1 line):
- line 741: // TODO DBZ-508 get rid of this

flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-iceberg/src/main/java/org/apache/flink/cdc/connectors/iceberg/sink/IcebergMetadataApplier.java (1 line):
- line 155: // TODO Add more partition transforms, see

flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc/src/main/java/io/debezium/connector/postgresql/connection/PostgresConnection.java (1 line):
- line 783: // TODO author=Horia Chiorean date=14/11/2016 description=workaround for
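
An inventory in this shape can be regenerated by scanning the source tree for TODO markers. Below is a minimal sketch in Java using only the standard library; the `TodoReport` class name, the default root directory, and the output grouping are assumptions for illustration, not part of any Flink CDC tooling.

```java
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Stream;

/** Hypothetical helper: walks a source tree and prints TODO comments grouped by file. */
public class TodoReport {

    public static void main(String[] args) throws IOException {
        // Root of the checkout; defaulting to "." is an assumption, pass the repo path as args[0].
        Path root = Paths.get(args.length > 0 ? args[0] : ".");
        Map<Path, List<String>> todosByFile = new LinkedHashMap<>();

        try (Stream<Path> paths = Files.walk(root)) {
            paths.filter(p -> p.toString().endsWith(".java"))
                    .sorted()
                    .forEach(p -> {
                        try {
                            // ISO-8859-1 decodes any byte sequence, so odd encodings won't abort the scan.
                            List<String> lines = Files.readAllLines(p, StandardCharsets.ISO_8859_1);
                            for (int i = 0; i < lines.size(); i++) {
                                if (lines.get(i).contains("TODO")) {
                                    todosByFile
                                            .computeIfAbsent(root.relativize(p), k -> new ArrayList<>())
                                            .add("- line " + (i + 1) + ": " + lines.get(i).trim());
                                }
                            }
                        } catch (IOException e) {
                            throw new UncheckedIOException(e);
                        }
                    });
        }

        // Emit the same "<path> (<n> lines):" grouping used in the report above.
        todosByFile.forEach((file, hits) -> {
            System.out.println(file + " (" + hits.size() + (hits.size() == 1 ? " line" : " lines") + "):");
            hits.forEach(System.out::println);
        });
    }
}
```

Running it from a repository checkout (`java TodoReport .`) prints one entry per file containing a TODO, in the same per-file, per-line layout as the listing above.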