ansible/roles/hadoop/tasks/main.yml

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

- name: "install hadoop tarball"
  unarchive: src={{ tarballs_dir }}/{{ hadoop_tarball }} dest={{ install_dir }} creates={{ hadoop_home }} copy=yes
- name: "configure hadoop with templates"
  template: src={{ item }} dest={{ hadoop_home }}/etc/hadoop/{{ item }}
  with_items:
    - core-site.xml
    - hdfs-site.xml
    - yarn-site.xml
    - mapred-site.xml
    - hadoop-metrics2.properties
- name: "configure hadoop 2"
  template: src={{ item }} dest={{ hadoop_home }}/etc/hadoop/{{ item }}
  with_items:
    - slaves
  when: hadoop_major_version == '2'
- name: "configure hadoop 3"
  template: src={{ item }} dest={{ hadoop_home }}/etc/hadoop/{{ item }}
  with_items:
    - workers
  when: hadoop_major_version == '3'
# This is currently needed to run hadoop with Java 11 (see https://github.com/apache/fluo-muchos/issues/266)
- name: "Copy javax.activation-api (when Hadoop 3 and Java 11 are used)"
  synchronize: src={{ user_home }}/mvn_dep/ dest={{ hadoop_home }}/share/hadoop/common/lib/
  when: hadoop_major_version == '3' and java_product_version == 11
- name: "setup hadoop short circuit socket dir"
  file: path=/var/lib/hadoop-hdfs state=directory owner={{ cluster_user }} group={{ cluster_group }} mode=0755
  become: yes
- name: "Configure hadoop log dir"
  replace:
    path: "{{ hadoop_home }}/etc/hadoop/hadoop-env.sh"
    regexp: '.*export\s+HADOOP_LOG_DIR.*'
    replace: "export HADOOP_LOG_DIR={{ worker_data_dirs[0] }}/logs/hadoop"
- name: "Create hadoop log dir"
  file: path={{ worker_data_dirs[0] }}/logs/hadoop state=directory
- name: Insert HADOOP_OPTIONAL_TOOLS & HADOOP_OPTS in hadoop-env.sh
  blockinfile:
    path: "{{ hadoop_home }}/etc/hadoop/hadoop-env.sh"
    insertafter: EOF
    block: |
      export HADOOP_OPTIONAL_TOOLS=hadoop-azure
      export HADOOP_OPTS="-Dorg.wildfly.openssl.path=/usr/lib64 ${HADOOP_OPTS}"
  when: cluster_type == 'azure' and hadoop_major_version == '3' and use_adlsg2
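
For context, below is a minimal sketch of a play that could apply this role; it is not part of the original file. The group name hadoop_nodes and every variable value shown are assumptions for illustration only; the real fluo-muchos playbooks and role defaults define their own inventory groups and variables (tarballs_dir, hadoop_tarball, install_dir, hadoop_home, hadoop_major_version, java_product_version, etc.).

# Hypothetical play applying the hadoop role (sketch only; values are assumptions)
- hosts: hadoop_nodes                                # assumed inventory group name
  vars:
    tarballs_dir: /home/centos/tarballs              # assumed staging dir for downloaded tarballs
    hadoop_tarball: hadoop-3.3.6.tar.gz              # assumed tarball file name
    install_dir: /opt/muchos/install                 # assumed install root
    hadoop_home: "{{ install_dir }}/hadoop-3.3.6"    # assumed HADOOP_HOME derived from the tarball
    hadoop_major_version: '3'                        # selects the "configure hadoop 3" tasks above
    java_product_version: 11                         # triggers the javax.activation-api copy task
  roles:
    - hadoop

With hadoop_major_version set to '3', only the workers template task and (for Java 11) the javax.activation-api copy run; setting it to '2' would render the slaves template instead.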