1) Nifi 1.2.0 Installation

	1.1 Installation Guide URL
		
		https://hortonworks.com/downloads
		
	1.2 Go to the Ambari Console http://localhost:8080 to stop the NiFi Service (old version)

	1.3 Upload nifi-1.2.0.3.0.2.0-76-bin.tar.gz to /root/TrainingOnHDP/ on HDP sandbox

	1.4 Login localhost:4200 and unpack the file at HDP sandbox

		cd /root/TrainingOnHDP/
		tar xvzf nifi-1.2.0.3.0.2.0-76-bin.tar.gz
		
	1.5 Open nifi.properties at /root/TrainingOnHDP/nifi-1.2.0.3.0.2.0-76/conf and make the following port change (was 8080)

		nifi.web.http.port=9090	
	
	1.6 Run Nifi
	
		/root/TrainingOnHDP/nifi-1.2.0.3.0.2.0-76/bin/nifi.sh start
		
	1.7 Browse to http://localhost:9090/nifi
	
2) MySQL configuration and NiFi database replication template upload	

	2.1 Login localhost:4200
	
	2.2 Stop MySQL
	
		service mysqld stop

	2.3 Change the MySQL configuration to enable binary logging on the MySQL instance by adding the following to /etc/my.cnf, under the [mysqld] section
	
		server_id=1
		log_bin=delta
		binlog_format=row
		binlog_do_db=nifi
		
	2.4 Restart MySQL
		
		service mysqld start
		
	2.5 Login to MySQL and create the databases nifi and nifi_bak

		mysql -u root -p
		create database nifi;
		create database nifi_bak;
		
	2.6 Change the root password if you want

		UPDATE mysql.user SET Password=PASSWORD('hadoop') WHERE User='root';
		flush privileges;

		
	2.7 Login http://localhost:9090/nifi, and upload the following template RDBMS_Database_Replication.xml

	2.8 Start RDBMS Database Replication from NiFi Workflow
	
	2.9 Create the new table

		use nifi;
		create table users ( 
			id mediumint(9) NOT NULL AUTO_INCREMENT PRIMARY KEY, 
			title text, 
			first text, 
			last text, 
			street text, 
			city text, 
			state text, 
			zip text, 	
			gender text, 
			email text, 
			username text, 
			password text, 
			phone text, 
			cell text, 
			ssn text, 
			date_of_birth timestamp NULL DEFAULT NULL, 
			reg_date timestamp NULL DEFAULT NULL, 
			large text, 
			medium text, 
			thumbnail text, 
			version text, 
			nationality text);
	
	2.10 Populate the data for the new table

		insert into users (id, title, first, last, street, city, state, zip, gender, email, username, password, phone, cell, ssn, date_of_birth, reg_date, large, medium, thumbnail, version, nationality) VALUES 
		(1, 'miss', 'marlene', 'shaw', '3450 w belt line rd', 'abilene', 'florida', '31995', 'F', 'marlene.shaw75@example.com', 'goldenpanda70', 'naughty', '(176)-908-6931', '(711)-565-2194', '800-71-1872', '1991-10-07 00:22:53', '2004-01-29 16:19:10', 'http://api.randomuser.me/portraits/women/67.jpg', 'http://api.randomuser.me/portraits/med/women/67.jpg', 'http://api.randomuser.me/portraits/thumb/women/67.jpg', '0.6', 'US'), 
		(2, 'ms', 'letitia', 'jordan', '2974 mockingbird hill', 'irvine', 'new jersey', '64361', 'F', 'letitia.jordan64@example.com', 'lazytiger614', 'aaaaa1', '(860)-602-3314', '(724)-685-3472', '548-93-7031', '1977-11-14 11:58:01', '2002-02-09 17:04:59', 'http://api.randomuser.me/portraits/women/19.jpg', 'http://api.randomuser.me/portraits/med/women/19.jpg', 'http://api.randomuser.me/portraits/thumb/women/19.jpg', '0.6', 'US'), 
		(3, 'mr', 'todd', 'graham', '5760 spring hill rd', 'garden grove', 'north carolina', '81790', 'M', 'todd.graham39@example.com', 'purplekoala484', 'paintball', '(230)-874-6532', '(186)-529-4912', '362-31-5248', '2006-07-25 05:48:01', '2004-12-05 11:26:34', 'http://api.randomuser.me/portraits/men/39.jpg', 'http://api.randomuser.me/portraits/med/men/39.jpg', 'http://api.randomuser.me/portraits/thumb/men/39.jpg', '0.6', 'US'), 
		(4, 'mr', 'seth', 'martinez', '4377 fincher rd', 'chandler', 'south carolina', '73651', 'M', 'seth.martinez82@example.com', 'bigbutterfly149', 'navy', '(122)-782-5822', '(720)-778-8541', '200-80-9087', '1981-02-28 08:22:49', '2009-08-31 12:42:57', 'http://api.randomuser.me/portraits/men/96.jpg', 'http://api.randomuser.me/portraits/med/men/96.jpg', 'http://api.randomuser.me/portraits/thumb/men/96.jpg', '0.6', 'US'), 
		(5, 'mr', 'guy', 'mckinney', '4524 hogan st', 'iowa park', 'ohio', '24140', 'M', 'guy.mckinney53@example.com', 'blueduck623', 'office', '(309)-556-7859', '(856)-764-9146', '973-37-9077', '1983-11-03 22:02:12', '2003-10-20 07:23:06', 'http://api.randomuser.me/portraits/men/24.jpg', 'http://api.randomuser.me/portraits/med/men/24.jpg', 'http://api.randomuser.me/portraits/thumb/men/24.jpg', '0.6', 'US'), 
		(6, 'ms', 'anna', 'smith', '5047 cackson st', 'rancho cucamonga', 'pennsylvania', '56486', 'F', 'anna.smith74@example.com', 'goldenfish121', 'albion', '(335)-388-7351', '(485)-150-6348', '680-20-6440', '1977-09-05 16:08:05', '2008-07-11 11:09:12', 'http://api.randomuser.me/portraits/women/89.jpg', 'http://api.randomuser.me/portraits/med/women/89.jpg', 'http://api.randomuser.me/portraits/thumb/women/89.jpg', '0.6', 'US'), 
		(7, 'mr', 'johnny', 'johnson', '7250 bruce st', 'gresham', 'new mexico', '83973', 'M', 'johnny.johnson73@example.com', 'crazyduck127', 'toast', '(142)-971-3099', '(991)-131-1582', '683-26-4133', '1988-08-12 14:04:27', '2001-04-30 15:32:34', 'http://api.randomuser.me/portraits/men/78.jpg', 'http://api.randomuser.me/portraits/med/men/78.jpg', 'http://api.randomuser.me/portraits/thumb/men/78.jpg', '0.6', 'US'), 
		(8, 'mrs', 'robin', 'white', '7882 northaven rd', 'orlando', 'connecticut', '40452', 'F', 'robin.white46@example.com', 'whitetiger371', 'elizabeth', '(311)-659-3812', '(689)-468-6420', '960-70-3399', '2003-07-05 13:09:41', '2014-10-01 02:54:46', 'http://api.randomuser.me/portraits/women/82.jpg', 'http://api.randomuser.me/portraits/med/women/82.jpg', 'http://api.randomuser.me/portraits/thumb/women/82.jpg', '0.6', 'US'), 
		(9, 'miss', 'allison', 'williams', '7648 edwards rd', 'edison', 'louisiana', '52040', 'F', 'allison.williams82@example.com', 'beautifulfish354', 'sanfran', '(328)-592-3520', '(550)-172-4018', '164-78-8160', '1983-04-09 08:00:42', '2000-01-01 07:18:54', 'http://api.randomuser.me/portraits/women/16.jpg', 'http://api.randomuser.me/portraits/med/women/16.jpg', 'http://api.randomuser.me/portraits/thumb/women/16.jpg', '0.6', 'US'), 
		(10, 'mrs', 'erika', 'king', '1171 depaul dr', 'addison', 'wisconsin', '50082', 'F', 'erika.king55@example.com', 'goldenbutterfly498', 'chill', '(635)-117-5424', '(662)-110-8448', '122-71-7145', '2003-09-19 07:26:17', '2002-12-31 00:08:43', 'http://api.randomuser.me/portraits/women/52.jpg', 'http://api.randomuser.me/portraits/med/women/52.jpg', 'http://api.randomuser.me/portraits/thumb/women/52.jpg', '0.6', 'US');	
			
	2.11 Verify the backup table 'users' is created and populated under database nifi_bak

3) Stock Price Monitor and Dashboard
	
	3.1 Login http://localhost:9090/nifi, and upload the following template Stock_Price_Monitor_and_Dashboard.xml
	
	3.2 Start HBase via Ambari and Create a new table:

		hbase shell
		create 'stocks', 'cf'
	
	3.3 ElasticSearch Installation
	
		3.3.1 Installation URL
		
			https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.1.1.tar.gz
	
		3.3.2 Upload elasticsearch-6.1.1.tar.gz to /root/TrainingOnHDP/ on HDP sandbox

		3.3.3 Login localhost:4200 and unpack the file at HDP sandbox

			cd /root/TrainingOnHDP/
			tar xvzf elasticsearch-6.1.1.tar.gz
		
		3.3.4 Open elasticsearch.yml at /root/TrainingOnHDP/elasticsearch-6.1.1/config and make sure the HTTP port is set as follows (default 9200)

			http.port: 9200
			
		3.3.5 Start Elastic Search

			useradd elastic
			passwd elastic
			su elastic
			
			/root/TrainingOnHDP/elasticsearch-6.1.1/bin/elasticsearch		
	
	3.4 Kibana Installation	
		
		3.4.1 Installation URL
		
			https://artifacts.elastic.co/downloads/kibana/kibana-6.1.1-linux-x86_64.tar.gz
	
		3.4.2 Upload kibana-6.1.1-linux-x86_64.tar.gz to /root/TrainingOnHDP/ on HDP sandbox

		3.4.3 Login localhost:4200 and unpack the file at HDP sandbox

			cd /root/TrainingOnHDP/
			tar xvzf kibana-6.1.1-linux-x86_64.tar.gz
			
		3.4.4 Open kibana.yml at /root/TrainingOnHDP/kibana-6.1.1-linux-x86_64 and make the following port change (was 5601)

			server.port: 8744
			server.host: "0.0.0.0"
			
		3.4.5 Start Kibana
		
			/root/TrainingOnHDP/kibana-6.1.1-linux-x86_64/bin/kibana
			
		3.4.6 Go to the Kibana console http://localhost:8744 to create the index pattern stocks-index
		
		3.4.7 Run the following to change the data type
		
			curl -XPUT -H "Content-Type: application/json" localhost:9200/_template/stocks-index -d '
			{
				"index_patterns" : "stocks-index*",
				"settings" : {
					"number_of_shards" : 1
				},
				"mappings" : {
					"stocks" : {
						"properties" : {
							"price" : {
								"type" : "double"
							},
							"volume" : {
								"type" : "long"
							},
							"ts" : {
								"type" : "date",
								"format" : "yyyy-MM-dd HH:mm:ss"
							}
						}
					}
				}
			}'
		
		3.4.8 Use the following way to delete index or template if you need
		
			curl -XDELETE localhost:9200/stocks-index
		
			curl -XDELETE localhost:9200/_template/stocks-index
		

		3.4.9 Import the following json file into Kibana
		
			Stocks Kibana Dashboard.json
			
			
4) Mobile Phone Vendors Twitter Sentiment Dashboard			
			
		4.1 Follow this link to create your twitter account, consumer key and access token: https://apps.twitter.com/
		
		4.2 Sometimes you need to synchronize the sandbox clock by running the following commands, then regenerate the consumer key and secret:
		
			yum install -y ntp
			service ntpd stop
			ntpdate pool.ntp.org
			service ntpd start

		4.3 Install the ElasticSearch Interpreter for Zeppelin

			/usr/hdp/current/zeppelin-server/bin/install-interpreter.sh --name elasticsearch
			ls -la /usr/hdp/current/zeppelin-server/interpreter/	
			Login Ambari, add the following to Zeppelin advanced configuration: ,org.apache.zeppelin.elasticsearch.ElasticsearchInterpreter
			restart Zeppelin	
			Go to the Zeppelin console localhost:9995, and create an interpreter setting in the 'Interpreter' menu on the Zeppelin GUI 
			Make sure the elasticsearch.host in Zeppelin Interpreter is the same as network.host in /root/TrainingOnHDP/elasticsearch-6.1.1/config/elasticsearch.yml, and elasticsearch.client.type is http, elasticsearch.port	is 9200
			Then you can bind the interpreter on your note  
			Import /root/TrainingOnHDP/ETLOnApacheNiFi/dataflow/Mobile Phone Vendors Twitter Dashboard.json into Zeppelin
		
		
		4.4 Login http://localhost:9090/nifi, and upload the following template Mobile_Vendors_Twitter_Sentiment_Dashboard.xml
		
		4.5 Install the Stanford CoreNLP Server

			4.5.1 Installation and Setup: http://stanfordnlp.github.io/CoreNLP/corenlp-server.html
			
			4.5.2 Full Deployment:  http://nlp.stanford.edu/software/stanford-corenlp-full-2016-10-31.zip
			
			4.5.3 Upload stanford-corenlp-full-2016-10-31.zip to /root/TrainingOnHDP/ on HDP sandbox 
			
			4.5.4 Login localhost:4200 and unpack the file at HDP sandbox

				cd /root/TrainingOnHDP/
				unzip stanford-corenlp-full-2016-10-31.zip
				
			4.5.5 Start CoreNLP Server
				
				cd /root/TrainingOnHDP/stanford-corenlp-full-2016-10-31	
				
				java -mx4g -cp "*" edu.stanford.nlp.pipeline.StanfordCoreNLPServer -port 9000 -timeout 15000	

			4.5.6 Test

				curl --data 'This is greatest test ever.' 'http://localhost:9000/?properties={%22annotators%22%3A%22sentiment%22%2C%22outputFormat%22%3A%22json%22}' -o -			
				
				wget --post-data 'This is the worst way to test sentiment ever.' 'localhost:9000/?properties={"annotators":"sentiment","outputFormat":"json"}' -O -
				
5) NiFi Cookbook

		5.1 Login http://localhost:9090/nifi, and upload the following template NiFi_Cookbook.xml
				
				
6) Real time Data Visualization via NiFi and Web Socket

		6.1 Login http://localhost:9090/nifi, and upload the following template Real_time_Data_Visualization_via_NiFi_and_Web_Socket.xml
		
		6.2 Start Kafka via Ambari
		
		6.3 Login http://localhost:4200, run the following command to create kafka topic "nifi_websocket"

			/usr/hdp/2.6.3.0-235/kafka/bin/kafka-topics.sh --create --zookeeper localhost:2181 --replication-factor 1 --partitions 1 --topic nifi_websocket				
		
		6.4 Start Zeppelin via Ambari
		
		6.5 Import Real time Data Visualization via NiFi and Web Socket.json into Zeppelin and run the Real time Data Visualization via NiFi and Web Socket note
		
		6.6	Run the following command to publish the message to kafka topic "nifi_websocket"

			/usr/hdp/2.6.3.0-235/kafka/bin/kafka-console-producer.sh --broker-list sandbox-hdp.hortonworks.com:6667 --topic nifi_websocket		
			
		6.7	Copy and paste the following message to kafka publish console:
			
			[{"label":"Category A", "value": 35},{"label":"Category B", "value": 15},{"label":"Category C", "value": 75}]
			
		6.8 From the Zeppelin note, you will see the pie chart keep changing