ansible-role-nginx/hadoop/roles/hadoop_slaves/tasks/slaves.yml

---
# Playbook for Hadoop slave servers

- name: Install the datanode and tasktracker packages
  yum: name={{ item }} state=installed
  with_items:
    - hadoop-0.20-mapreduce-tasktracker
    - hadoop-hdfs-datanode

- name: Copy the hadoop configuration files for HA
  template: src=roles/common/templates/hadoop_ha_conf/{{ item }}.j2 dest=/etc/hadoop/conf/{{ item }}
  with_items:
    - core-site.xml
    - hadoop-metrics.properties
    - hadoop-metrics2.properties
    - hdfs-site.xml
    - log4j.properties
    - mapred-site.xml
    - slaves
    - ssl-client.xml.example
    - ssl-server.xml.example
  when: ha_enabled
  notify: restart hadoop slave services
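
# The "restart hadoop slave services" handler is expected to be defined in this
# role's handlers file (e.g. handlers/main.yml, not shown here); a minimal
# sketch, assuming the same two services installed above:
#   - name: restart hadoop slave services
#     service: name={{ item }} state=restarted
#     with_items:
#       - hadoop-0.20-mapreduce-tasktracker
#       - hadoop-hdfs-datanode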

- name: Copy the hadoop configuration files for non-HA
  template: src=roles/common/templates/hadoop_conf/{{ item }}.j2 dest=/etc/hadoop/conf/{{ item }}
  with_items:
    - core-site.xml
    - hadoop-metrics.properties
    - hadoop-metrics2.properties
    - hdfs-site.xml
    - log4j.properties
    - mapred-site.xml
    - slaves
    - ssl-client.xml.example
    - ssl-server.xml.example
  when: not ha_enabled
  notify: restart hadoop slave services

- name: Create the data directory for the slave nodes to store the data
  file: path={{ item }} owner=hdfs group=hdfs state=directory
  with_items: "{{ hadoop.dfs_datanode_data_dir }}"

- name: Create the data directory for the slave nodes for mapreduce
  file: path={{ item }} owner=mapred group=mapred state=directory
  with_items: "{{ hadoop.mapred_local_dir }}"
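
# The two tasks above assume a "hadoop" dictionary variable (for example in
# group_vars) that lists the data directories; the paths below are purely a
# hypothetical sketch:
#   hadoop:
#     dfs_datanode_data_dir:
#       - /data/1/dfs/dn
#       - /data/2/dfs/dn
#     mapred_local_dir:
#       - /data/1/mapred/local
#       - /data/2/mapred/local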

- name: start hadoop slave services
  service: name={{ item }} state=started
  with_items:
    - hadoop-0.20-mapreduce-tasktracker
    - hadoop-hdfs-datanode
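
# This tasks file runs as part of the hadoop_slaves role; a hypothetical play
# in a top-level playbook (the host group name is an assumption) might be:
#   - hosts: hadoop_slaves
#     roles:
#       - hadoop_slaves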