in vault-core/src/main/java/org/apache/jackrabbit/vault/fs/impl/TransactionImpl.java [204:359]
/**
 * Commits all changes recorded in this transaction to the repository.
 * <p>
 * The commit runs in three phases:
 * <ol>
 *   <li>folds ADDED binary artifacts and MKDIR changes into their owning
 *       {@code .content.xml} change (so they are imported together),</li>
 *   <li>repeatedly processes the remaining changes via {@code processChange}
 *       until a fixed point is reached, flushing the collected modifications
 *       after every pass that made progress,</li>
 *   <li>saves the JCR session (or rolls back via {@code refresh(false)} if
 *       any import produced errors) and checks in the touched nodes.</li>
 * </ol>
 *
 * @return the per-path {@link Info} records describing what was committed
 * @throws RepositoryException if an import fails, if any change produced an
 *         error (the session is rolled back first), or if the session save fails
 * @throws IOException if writing an artifact fails
 */
public Collection<Info> commit() throws RepositoryException, IOException {
Map<String, Info> infos = new HashMap<String, Info>();
// remember all nodes to checkin again
ImportInfoImpl allInfos = new ImportInfoImpl();
// first scan all changes for additions that need to be attached to a
// .content.xml change
if (!dotXmlNodes.isEmpty()) {
Iterator<Change> iter = changes.iterator();
while (iter.hasNext()) {
Change c = iter.next();
if (c.type == Type.ADDED) {
// an added binary that belongs to a node covered by a .content.xml
// is attached to that change instead of being processed on its own
if (c.isa.getType() == ArtifactType.BINARY) {
DotXmlInfo dxi = dotXmlNodes.get(c.repoPath);
if (dxi != null) {
dxi.change.add(c);
iter.remove();
}
}
} else if (c.type == Type.MKDIR) {
// a directory creation for a node covered by a .content.xml is
// redundant; the .content.xml import creates the node
DotXmlInfo dxi = dotXmlNodes.get(c.repoPath);
if (dxi != null) {
iter.remove();
}
}
}
}
// process the changes and group them by artifact path
Map<String, TxInfo> modified = new TreeMap<String, TxInfo>(new PathComparator());
// first pass runs with ignoreMP=true, then retries with false if stuck
// NOTE(review): "MP" is not expanded anywhere in this method — presumably
// "missing parent"; confirm against processChange()
boolean ignoreMP = true;
while (!changes.isEmpty()) {
int size = changes.size();
// process as many changes that have a parent file
Iterator<Change> iter = changes.iterator();
while (iter.hasNext()) {
Change change = iter.next();
if (processChange(change, modified, ignoreMP)) {
// the list was modified outside the iterator, so restart
// iteration from the beginning instead of calling iter.remove()
changes.remove(change);
iter = changes.iterator();
}
}
if (changes.size() == size) {
// no progress in this pass: first relax ignoreMP, then give up and
// mark every remaining change as an error
if (ignoreMP) {
ignoreMP = false;
} else {
for (Change c: changes) {
infos.put(c.filePath, new Info(Type.ERROR, c.filePath));
}
// abort iteration
changes.clear();
}
} else {
// write back the current collected modifications and generate a
// new modified info list
for (TxInfo info : modified.values()) {
// check if primary artifact is still present
// NOTE(review): the branches below assume that info.out == null
// implies info.aggregate == null; if out is null while aggregate
// is non-null, the next branch would NPE — confirm the invariant
if (info.out == null && info.aggregate == null) {
// this was an intermediate directory delete
for (String path: info.original.keySet()) {
infos.put(path, new Info(Type.DELETED, path));
if (verbose) {
log.info("...comitting DEL {}", path);
}
}
} else if (info.out.getArtifacts().isEmpty() && info.aggregate != null) {
// no artifacts remain for this aggregate: it was fully deleted
// delete entire node if aggregate is still attached
if (info.aggregate.isAttached()) {
info.aggregate.remove(false);
}
// generate infos for the deleted ones
for (String path: info.original.keySet()) {
infos.put(path, new Info(Type.DELETED, path));
if (verbose) {
log.info("...comitting DEL {}", path);
}
}
// mark the primary artifact of the parent as modified
// TODO fix
String cXmlPath = info.parentFile.getPath();
if (cXmlPath.endsWith("/")) {
cXmlPath+= Constants.DOT_CONTENT_XML;
} else {
cXmlPath+= "/" + Constants.DOT_CONTENT_XML;
}
// only record the parent as MODIFIED if no other info (e.g. a
// DELETE) was already recorded for that path
Info i = infos.get(cXmlPath);
if (i == null) {
infos.put(cXmlPath, new Info(Type.MODIFIED, cXmlPath));
}
} else if (info.aggregate == null) {
// this was and addition
// for now, just guess from the artifacts the new files
String parentPath = info.parentFile.getPath();
if (!parentPath.endsWith("/")) {
parentPath += "/";
}
// every imported artifact becomes an ADDED entry under the parent
for (Artifact a: info.out.getArtifacts().values()) {
if (a instanceof ImportArtifact) {
String path = parentPath + a.getPlatformPath();
infos.put(path, new Info(Type.ADDED, path));
}
}
// closing the output performs the actual import and reports
// what was changed in the repository
ImportInfo ret = info.out.close();
if (ret != null) {
allInfos.merge(ret);
if (verbose) {
for (Map.Entry<String,ImportInfo.Type> e: ret.getModifications().entrySet()) {
log.info("...committing {} {}", e.getValue(), e.getKey());
}
}
}
// modify parent
infos.put(info.parentFile.getPath(), new Info(Type.MODIFIED, info.parentFile.getPath()));
} else {
// this was a modification
ImportInfo ret = info.out.close();
if (ret != null) {
allInfos.merge(ret);
}
// all original files of the aggregate count as updated
for (VaultFile file: info.original.values()) {
infos.put(file.getPath(), new Info(Type.MODIFIED, file.getPath()));
if (verbose) {
log.info("...comitting UPD {}", file.getPath());
}
}
if (verbose && ret != null) {
for (Map.Entry<String,ImportInfo.Type> e: ret.getModifications().entrySet()) {
log.info("...committing {} {}", e.getValue(), e.getKey());
}
}
}
}
}
// start the next pass with a clean modification map and fresh fs state
modified.clear();
fs.invalidate();
}
if (verbose) {
log.info("Persisting changes...");
}
if (allInfos.numErrors() > 0) {
// roll back everything done so far before reporting the failure
try {
fs.getAggregateManager().getSession().refresh(false);
} catch (RepositoryException e) {
// ignore
}
throw new RepositoryException("There were errors during commit. Aborting transaction.");
}
fs.getAggregateManager().getSession().save();
// check in all versionable nodes that were touched by the imports
allInfos.checkinNodes(fs.getAggregateManager().getSession());
fs.invalidate();
return infos.values();
}