in openjpa-jdbc/src/main/java/org/apache/openjpa/jdbc/meta/MappingTool.java [491:629]
/**
 * Records all planned mapping and schema changes accumulated by this tool:
 * optionally drops mapped classes and unused schema components, runs a
 * {@code SchemaTool} for each configured schema action, serializes the
 * planned schema to XML if a schema writer was set, and finally stores the
 * class/query/sequence metadata through the repository's
 * {@code MetaDataFactory}. State is cleared in a finally block regardless
 * of outcome.
 *
 * @param flags optional extra tool settings (drop behavior, rollback,
 *              SQL writer, terminator, etc.); may be {@code null}, in
 *              which case each freshly created {@code SchemaTool} keeps
 *              its defaults
 * @throws MetaDataException if the metadata factory fails to store the
 *              mappings
 * @throws GeneralException wrapping any other checked exception raised
 *              while recording
 */
public void record(MappingTool.Flags flags) {
MappingRepository repos = getRepository();
MetaDataFactory io = repos.getMetaDataFactory();
// Select the set of mappings to operate on: all known mappings unless we
// are dropping, in which case only the explicitly registered drop set.
ClassMapping[] mappings;
if (!ACTION_DROP.equals(_action))
mappings = repos.getMappings();
else if (_dropMap != null)
mappings = (ClassMapping[]) _dropMap.toArray
(new ClassMapping[_dropMap.size()]);
else
mappings = new ClassMapping[0];
try {
// Drop metadata for explicitly listed classes first; a false return is
// non-fatal and only logged as a warning.
if (_dropCls != null && !_dropCls.isEmpty()) {
Class<?>[] cls = (Class[]) _dropCls.toArray
(new Class[_dropCls.size()]);
if (!io.drop(cls, _mode, null))
_log.warn(_loc.get("bad-drop", _dropCls));
}
if (_flushSchema) {
// drop portions of the known schema that no mapping uses, and
// add sequences used for value generation
if (_dropUnused)
dropUnusedSchemaComponents(mappings);
addSequenceComponents(mappings);

// now run the schematool as long as we're doing some schema
// action and the user doesn't just want an xml output
// NOTE(review): tokens from split(",") are not trimmed, so
// "add, drop" would yield " drop" and match nothing — confirm
// callers always pass an unspaced list.
String[] schemaActions = _schemaActions.split(",");
for (String schemaAction : schemaActions) {
if (!SCHEMA_ACTION_NONE.equals(schemaAction)
&& (_schemaWriter == null || (_schemaTool != null
&& _schemaTool.getWriter() != null))) {
SchemaTool tool;
// Script-based actions all map onto a single EXECUTE_SCRIPT
// tool; the concrete script source is selected in the switch
// below. All other actions name the SchemaTool action directly.
if (schemaAction.equals(ACTION_SCRIPT_CREATE) ||
schemaAction.equals(ACTION_SCRIPT_DROP) ||
schemaAction.equals(ACTION_SCRIPT_LOAD)) {
tool = newSchemaTool(SchemaTool.ACTION_EXECUTE_SCRIPT);
}
else {
tool = newSchemaTool(schemaAction);
}

// When generating DDL to a script target rather than the
// database, route the tool's output to that file and emit
// the full set of schema artifacts.
if (schemaAction.equals(SchemaTool.ACTION_BUILD) && _conf.getCreateScriptTarget() != null) {
tool.setWriter(new PrintWriter(_conf.getCreateScriptTarget()));
tool.setIndexes(true);
tool.setForeignKeys(true);
tool.setSequences(true);
}
if (schemaAction.equals(SchemaTool.ACTION_DROP) && _conf.getDropScriptTarget() != null) {
tool.setWriter(new PrintWriter(_conf.getDropScriptTarget()));
}

// configure the tool with additional settings
// NOTE(review): flags.sqlWriter (which may be null) overwrites
// any script-target writer set just above — confirm this
// precedence is intended for combined flag + script-target use.
if (flags != null) {
tool.setDropTables(flags.dropTables);
tool.setRollbackBeforeDDL(flags.rollbackBeforeDDL);
tool.setDropSequences(flags.dropSequences);
tool.setWriter(flags.sqlWriter);
tool.setOpenJPATables(flags.openjpaTables);
tool.setSQLTerminator(flags.sqlTerminator);
}

// Point script-execution actions at the matching configured
// script source.
switch (schemaAction) {
case ACTION_SCRIPT_CREATE:
tool.setScriptToExecute(_conf.getCreateScriptSource());
break;
case ACTION_SCRIPT_DROP:
tool.setScriptToExecute(_conf.getDropScriptSource());
break;
case ACTION_SCRIPT_LOAD:
tool.setScriptToExecute(_conf.getLoadScriptSource());
break;
}

// Execute the action against the planned schema, persist its
// results, then reset the tool for the next action.
tool.setSchemaGroup(getSchemaGroup());
tool.run();
tool.record();
tool.clear();
}
}

// xml output of schema?
if (_schemaWriter != null) {
// serialize the planned schema to the stream
SchemaSerializer ser = new XMLSchemaSerializer(_conf);
ser.addAll(getSchemaGroup());
ser.serialize(_schemaWriter, MetaDataSerializer.PRETTY);
_schemaWriter.flush();
}
}
// Schema work is done; stop here unless metadata flushing was requested.
if (!_flush)
return;

QueryMetaData[] queries = repos.getQueryMetaDatas();
SequenceMetaData[] seqs = repos.getSequenceMetaDatas();
Map<File, String> output = null;

// if we're outputting to stream, set all metas to same file so
// they get placed in single string
if (_mappingWriter != null) {
output = new HashMap<>();
File tmp = new File("openjpatmp");
for (ClassMapping mapping : mappings) {
mapping.setSource(tmp, SourceTracker.SRC_OTHER, "openjpatmp");
}
for (QueryMetaData query : queries) {
query.setSource(tmp, query.getSourceScope(), SourceTracker.SRC_OTHER, "openjpatmp");
}
for (SequenceMetaData seq : seqs)
seq.setSource(tmp, seq.getSourceScope(),
SourceTracker.SRC_OTHER);
}

// store
// A false return from the factory is treated as a hard failure.
if (!io.store(mappings, queries, seqs, _mode, output))
throw new MetaDataException(_loc.get("bad-store"));

// write to stream
// Safe to dereference output here: it was populated above under the
// same _mappingWriter != null condition.
if (_mappingWriter != null) {
PrintWriter out = new PrintWriter(_mappingWriter);
for (String s : output.values()) {
out.println(s);
}
out.flush();
}
}
catch (RuntimeException re) {
throw re;
} catch (Exception e) {
// Wrap checked exceptions (e.g. FileNotFoundException from the
// PrintWriter constructors above) in the tool's runtime exception type.
throw new GeneralException(e);
} finally {
// Always reset tool state, even on failure.
clear();
}
}