private Analyzer readAnalyzer()

in solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java [179:437]


  /**
   * Builds an {@link Analyzer} from an {@code <analyzer>} config node.
   *
   * <p>Two mutually exclusive configuration styles are supported:
   *
   * <ul>
   *   <li>An explicit {@code class} attribute naming an {@link Analyzer} subclass with a no-arg
   *       constructor. Nested charFilter/tokenizer/filter elements are an error in this case.
   *   <li>A composed analyzer: zero or more {@code <charFilter>} elements, exactly one {@code
   *       <tokenizer>}, and zero or more {@code <filter>} elements, assembled into a {@link
   *       TokenizerChain}.
   * </ul>
   *
   * @param node the {@code <analyzer>} config node; may be {@code null}
   * @return the constructed analyzer, or {@code null} if {@code node} is {@code null}
   * @throws SolrException on any configuration error (unknown class, both/neither of name and
   *     className given for a factory, multiple tokenizers, missing tokenizer, etc.)
   */
  private Analyzer readAnalyzer(ConfigNode node) {

    final SolrClassLoader loader = schema.getSolrClassLoader();

    // parent node used to be passed in as "fieldtype"

    if (node == null) return null;
    String analyzerName = node.attr("class");

    // check for all of these up front, so we can error if used in
    // conjunction with an explicit analyzer class.
    List<ConfigNode> charFilterNodes = node.getAll("charFilter");
    List<ConfigNode> tokenizerNodes = node.getAll("tokenizer");
    List<ConfigNode> tokenFilterNodes = node.getAll("filter");

    if (analyzerName != null) {

      // explicitly check for child analysis factories instead of
      // just any child nodes, because the user might have their
      // own custom nodes (ie: <description> or something like that)
      if (0 != charFilterNodes.size()
          || 0 != tokenizerNodes.size()
          || 0 != tokenFilterNodes.size()) {
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR,
            "Configuration Error: Analyzer class='"
                + analyzerName
                + "' can not be combined with nested analysis factories");
      }

      try {
        // No need to be core-aware as Analyzers are not in the core-aware list
        final Class<? extends Analyzer> clazz = loader.findClass(analyzerName, Analyzer.class);
        Analyzer analyzer = clazz.getConstructor().newInstance();

        final String matchVersionStr = node.attr(LUCENE_MATCH_VERSION_PARAM);
        final Version luceneMatchVersion =
            (matchVersionStr == null)
                ? schema.getDefaultLuceneMatchVersion()
                : SolrConfig.parseLuceneVersionString(matchVersionStr);
        if (luceneMatchVersion == null) {
          throw new SolrException(
              SolrException.ErrorCode.SERVER_ERROR,
              "Configuration Error: Analyzer '"
                  + clazz.getName()
                  + "' needs a '"
                  + IndexSchema.LUCENE_MATCH_VERSION_PARAM
                  + "' parameter");
        }
        // FIX: the resolved version was previously computed but never applied,
        // so explicit analyzer classes silently ignored luceneMatchVersion.
        analyzer.setVersion(luceneMatchVersion);
        return analyzer;
      } catch (Exception e) {
        log.error("Cannot load analyzer: {}", analyzerName, e);
        throw new SolrException(
            SolrException.ErrorCode.SERVER_ERROR, "Cannot load analyzer: " + analyzerName, e);
      }
    }

    // Load the CharFilters

    final ArrayList<CharFilterFactory> charFilters = new ArrayList<>();
    AbstractPluginLoader<CharFilterFactory> charFilterLoader =
        new AbstractPluginLoader<>(
            "[schema.xml] analyzer/charFilter", CharFilterFactory.class, false, false) {

          @Override
          protected CharFilterFactory create(
              SolrClassLoader loader, String name, String className, ConfigNode node)
              throws Exception {
            // Copy attrs so we can strip luceneMatchVersion before handing params to the factory.
            final Map<String, String> params = new HashMap<>(node.attributes());
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(
                LUCENE_MATCH_VERSION_PARAM,
                parseConfiguredVersion(configuredVersion, CharFilterFactory.class.getSimpleName())
                    .toString());
            CharFilterFactory factory;
            // Exactly one of SPI name or explicit className must be given.
            if (Objects.nonNull(name)) {
              factory = CharFilterFactory.forName(name, params);
              if (Objects.nonNull(className)) {
                log.error(
                    "Both of name: {} and className: {} are specified for charFilter.",
                    name,
                    className);
                throw new SolrException(
                    SolrException.ErrorCode.SERVER_ERROR,
                    "Cannot create charFilter: Both of name and className are specified.");
              }
            } else if (Objects.nonNull(className)) {
              factory =
                  loader.newInstance(
                      className,
                      CharFilterFactory.class,
                      getDefaultPackages(),
                      new Class<?>[] {Map.class},
                      new Object[] {params});
            } else {
              log.error("Neither of name or className is specified for charFilter.");
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "Cannot create charFilter: Neither of name or className is specified.");
            }
            // Record whether the version was explicitly configured (vs. defaulted).
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
          }

          @Override
          protected void init(CharFilterFactory plugin, ConfigNode node) throws Exception {
            if (plugin != null) {
              charFilters.add(plugin);
            }
          }

          @Override
          protected CharFilterFactory register(String name, CharFilterFactory plugin) {
            return null; // used for map registration
          }
        };

    charFilterLoader.load(loader, charFilterNodes);

    // Load the Tokenizer
    // Although an analyzer only allows a single Tokenizer, we load a list to make sure
    // the configuration is ok

    final ArrayList<TokenizerFactory> tokenizers = new ArrayList<>(1);
    AbstractPluginLoader<TokenizerFactory> tokenizerLoader =
        new AbstractPluginLoader<>(
            "[schema.xml] analyzer/tokenizer", TokenizerFactory.class, false, false) {

          @Override
          protected TokenizerFactory create(
              SolrClassLoader loader, String name, String className, ConfigNode node)
              throws Exception {
            // Copy attrs so we can strip luceneMatchVersion before handing params to the factory.
            final Map<String, String> params = new HashMap<>(node.attributes());
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(
                LUCENE_MATCH_VERSION_PARAM,
                parseConfiguredVersion(configuredVersion, TokenizerFactory.class.getSimpleName())
                    .toString());
            TokenizerFactory factory;
            // Exactly one of SPI name or explicit className must be given.
            if (Objects.nonNull(name)) {
              factory = TokenizerFactory.forName(name, params);
              if (Objects.nonNull(className)) {
                log.error(
                    "Both of name: {} and className: {} are specified for tokenizer.",
                    name,
                    className);
                throw new SolrException(
                    SolrException.ErrorCode.SERVER_ERROR,
                    "Cannot create tokenizer: Both of name and className are specified.");
              }
            } else if (Objects.nonNull(className)) {
              factory =
                  loader.newInstance(
                      className,
                      TokenizerFactory.class,
                      getDefaultPackages(),
                      new Class<?>[] {Map.class},
                      new Object[] {params});
            } else {
              log.error("Neither of name or className is specified for tokenizer.");
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "Cannot create tokenizer: Neither of name or className is specified.");
            }
            // Record whether the version was explicitly configured (vs. defaulted).
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
          }

          @Override
          protected void init(TokenizerFactory plugin, ConfigNode node) throws Exception {
            // An analyzer permits only a single tokenizer; reject duplicates eagerly.
            if (!tokenizers.isEmpty()) {
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "The schema defines multiple tokenizers for: " + node);
            }
            tokenizers.add(plugin);
          }

          @Override
          protected TokenizerFactory register(String name, TokenizerFactory plugin) {
            return null; // used for map registration
          }
        };

    tokenizerLoader.load(loader, tokenizerNodes);

    // Make sure something was loaded
    if (tokenizers.isEmpty()) {
      throw new SolrException(
          SolrException.ErrorCode.SERVER_ERROR, "analyzer without class or tokenizer");
    }

    // Load the Filters

    final ArrayList<TokenFilterFactory> filters = new ArrayList<>();

    AbstractPluginLoader<TokenFilterFactory> filterLoader =
        new AbstractPluginLoader<>(
            "[schema.xml] analyzer/filter", TokenFilterFactory.class, false, false) {
          @Override
          protected TokenFilterFactory create(
              SolrClassLoader loader, String name, String className, ConfigNode node)
              throws Exception {
            // Copy attrs so we can strip luceneMatchVersion before handing params to the factory.
            final Map<String, String> params = new HashMap<>(node.attributes());
            String configuredVersion = params.remove(LUCENE_MATCH_VERSION_PARAM);
            params.put(
                LUCENE_MATCH_VERSION_PARAM,
                parseConfiguredVersion(configuredVersion, TokenFilterFactory.class.getSimpleName())
                    .toString());
            TokenFilterFactory factory;
            // Exactly one of SPI name or explicit className must be given.
            if (Objects.nonNull(name)) {
              factory = TokenFilterFactory.forName(name, params);
              if (Objects.nonNull(className)) {
                log.error(
                    "Both of name: {} and className: {} are specified for tokenFilter.",
                    name,
                    className);
                throw new SolrException(
                    SolrException.ErrorCode.SERVER_ERROR,
                    "Cannot create tokenFilter: Both of name and className are specified.");
              }
            } else if (Objects.nonNull(className)) {
              factory =
                  loader.newInstance(
                      className,
                      TokenFilterFactory.class,
                      getDefaultPackages(),
                      new Class<?>[] {Map.class},
                      new Object[] {params});
            } else {
              log.error("Neither of name or className is specified for tokenFilter.");
              throw new SolrException(
                  SolrException.ErrorCode.SERVER_ERROR,
                  "Cannot create tokenFilter: Neither of name or className is specified.");
            }
            // Record whether the version was explicitly configured (vs. defaulted).
            factory.setExplicitLuceneMatchVersion(null != configuredVersion);
            return factory;
          }

          @Override
          protected void init(TokenFilterFactory plugin, ConfigNode node) throws Exception {
            if (plugin != null) {
              filters.add(plugin);
            }
          }

          @Override
          protected TokenFilterFactory register(String name, TokenFilterFactory plugin)
              throws Exception {
            return null; // used for map registration
          }
        };
    filterLoader.load(loader, tokenFilterNodes);

    // Assemble the composed analyzer: charFilters -> tokenizer -> tokenFilters.
    return new TokenizerChain(
        charFilters.toArray(new CharFilterFactory[0]),
        tokenizers.get(0),
        filters.toArray(new TokenFilterFactory[0]));
  }