in asterixdb/asterix-app/src/main/java/org/apache/asterix/app/translator/QueryTranslator.java [1812:1976]
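// Implements CREATE INDEX for a secondary index across three metadata transactions:
// (1) add the index with PENDING_ADD_OP and build the empty index artifact on the NCs,
// (2) bulk-load the new index from the dataset's primary index,
// (3) replace the pending record with one marked PENDING_NO_OP.
// If a failure occurs after step (1) has been committed, the catch block runs
// compensating actions that drop the index artifact and its metadata record.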
private void doCreateIndexImpl(IHyracksClientConnection hcc, MetadataProvider metadataProvider, Dataset ds,
Index index, EnumSet<JobFlag> jobFlags, SourceLocation sourceLoc, Creator creator,
EntityDetails entityDetails) throws Exception {
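// 'progress' records whether the PENDING_ADD_OP record has been committed (and thus
// whether compensation is needed on failure); 'bActiveTxn' tracks whether the current
// metadata transaction is still open and must be aborted on error.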
ProgressState progress = ProgressState.NO_PROGRESS;
boolean bActiveTxn = true;
MetadataTransactionContext mdTxnCtx = metadataProvider.getMetadataTxnContext();
JobSpecification spec;
try {
index.setPendingOp(MetadataUtil.PENDING_ADD_OP);
if (ds.getDatasetType() == DatasetType.INTERNAL) {
validateDatasetState(metadataProvider, ds, sourceLoc);
} else if (ds.getDatasetType() == DatasetType.EXTERNAL) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc, dataset() + " using "
+ ((ExternalDatasetDetails) ds.getDatasetDetails()).getAdapter() + " adapter can't be indexed");
}
// check whether another enforced index is already defined on the same field(s) with different type(s)
if (index.isEnforced()) {
List<List<String>> indexKeyFieldNames;
List<IAType> indexKeyFieldTypes;
switch (Index.IndexCategory.of(index.getIndexType())) {
case VALUE:
Index.ValueIndexDetails valueIndexDetails = (Index.ValueIndexDetails) index.getIndexDetails();
indexKeyFieldNames = valueIndexDetails.getKeyFieldNames();
indexKeyFieldTypes = valueIndexDetails.getKeyFieldTypes();
break;
case TEXT:
Index.TextIndexDetails textIndexDetails = (Index.TextIndexDetails) index.getIndexDetails();
indexKeyFieldNames = textIndexDetails.getKeyFieldNames();
indexKeyFieldTypes = textIndexDetails.getKeyFieldTypes();
break;
default:
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc, "");
}
List<Index> indexes =
MetadataManager.INSTANCE.getDatasetIndexes(metadataProvider.getMetadataTxnContext(),
index.getDatabaseName(), index.getDataverseName(), index.getDatasetName());
for (Index existingIndex : indexes) {
if (!existingIndex.isEnforced()) {
continue;
}
List<List<String>> existingIndexKeyFieldNames;
List<IAType> existingIndexKeyFieldTypes;
switch (Index.IndexCategory.of(existingIndex.getIndexType())) {
case VALUE:
Index.ValueIndexDetails valueIndexDetails =
(Index.ValueIndexDetails) existingIndex.getIndexDetails();
existingIndexKeyFieldNames = valueIndexDetails.getKeyFieldNames();
existingIndexKeyFieldTypes = valueIndexDetails.getKeyFieldTypes();
break;
case TEXT:
Index.TextIndexDetails textIndexDetails =
(Index.TextIndexDetails) existingIndex.getIndexDetails();
existingIndexKeyFieldNames = textIndexDetails.getKeyFieldNames();
existingIndexKeyFieldTypes = textIndexDetails.getKeyFieldTypes();
break;
default:
// ARRAY indexes cannot be enforced yet.
throw new CompilationException(ErrorCode.COMPILATION_ILLEGAL_STATE, sourceLoc, "");
}
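// two enforced indexes may share the same key fields only if they agree on the key types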
if (existingIndexKeyFieldNames.equals(indexKeyFieldNames)
&& !existingIndexKeyFieldTypes.equals(indexKeyFieldTypes)) {
String fieldNames = indexKeyFieldNames.stream().map(RecordUtil::toFullyQualifiedName)
.collect(Collectors.joining(","));
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
"Cannot create index " + index.getIndexName() + " , enforced index "
+ existingIndex.getIndexName() + " on field(s) '"
+ LogRedactionUtil.userData(fieldNames) + "' is already defined with type(s) '"
+ StringUtils.join(existingIndexKeyFieldTypes, ',') + "'");
}
}
}
// #. add a new index with PendingAddOp
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
// #. prepare to create the index artifact in NC.
spec = IndexUtil.buildSecondaryIndexCreationJobSpec(ds, index, metadataProvider, sourceLoc);
if (spec == null) {
throw new CompilationException(ErrorCode.COMPILATION_ERROR, sourceLoc,
"Failed to create job spec for creating index '" + ds.getDatasetName() + "."
+ index.getIndexName() + "'");
}
beforeTxnCommit(metadataProvider, creator, entityDetails);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
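// the PENDING_ADD_OP index record is now committed to the metadata; any failure
// from this point on triggers the compensating actions in the catch block below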
progress = ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA;
// #. create the index artifact in NC.
runJob(hcc, spec, jobFlags);
// #. flush the internal dataset
// We need this to guarantee the correctness of component Id acceleration for
// the secondary-to-primary index. Otherwise, the new secondary index component
// would correspond to a partial memory component of the primary index, which
// is incorrect.
if (ds.getDatasetType() == DatasetType.INTERNAL) {
FlushDatasetUtil.flushDataset(hcc, metadataProvider, index.getDatabaseName(), index.getDataverseName(),
index.getDatasetName());
}
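// reopen a metadata transaction; compiling the index loading job reads metadata
// through the metadataProvider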
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. load data into the index in NC.
spec = IndexUtil.buildSecondaryIndexLoadingJobSpec(ds, index, metadataProvider, sourceLoc);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
runJob(hcc, spec, jobFlags);
// #. begin new metadataTxn
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
// #. replace the PendingAddOp index record with a PendingNoOp record
// (drop the pending record, then re-add the index with PENDING_NO_OP)
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(), index.getDatabaseName(),
index.getDataverseName(), index.getDatasetName(), index.getIndexName());
index.setPendingOp(MetadataUtil.PENDING_NO_OP);
MetadataManager.INSTANCE.addIndex(metadataProvider.getMetadataTxnContext(), index);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e) {
if (bActiveTxn) {
abort(e, e, mdTxnCtx);
}
if (progress == ProgressState.ADDED_PENDINGOP_RECORD_TO_METADATA) {
// #. execute compensation operations
// remove the index in NC
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
bActiveTxn = true;
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
JobSpecification jobSpec = IndexUtil.buildDropIndexJobSpec(index, metadataProvider, ds, sourceLoc);
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
bActiveTxn = false;
runJob(hcc, jobSpec, jobFlags);
} catch (Exception e2) {
e.addSuppressed(e2);
if (bActiveTxn) {
abort(e, e2, mdTxnCtx);
}
}
// remove the record from the metadata.
mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
metadataProvider.setMetadataTxnContext(mdTxnCtx);
try {
MetadataManager.INSTANCE.dropIndex(metadataProvider.getMetadataTxnContext(),
index.getDatabaseName(), index.getDataverseName(), index.getDatasetName(),
index.getIndexName());
MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
} catch (Exception e2) {
e.addSuppressed(e2);
abort(e, e2, mdTxnCtx);
throw new IllegalStateException("System is in inconsistent state: pending index("
+ index.getDataverseName() + "." + index.getDatasetName() + "." + index.getIndexName()
+ ") couldn't be removed from the metadata", e);
}
}
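// re-throw the original failure once any compensating actions have completed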
throw e;
}
}