##create a database
show databases;
create database sqoop_db;
show databases;
use sqoop_db;
show tables;
###create tables employees and departments for the tab-delimited files Hemp.txt and Hdept.txt available in /tmp
****************MYSQL****************************
CREATE TABLE employees
(empid int,
name varchar(15),
salary int,
deptid int);
CREATE TABLE departments
(deptid int,
deptname varchar(15));
--copy files Hemp.txt & Hdept.txt from the Desktop to /tmp
cp Desktop/Hemp.txt /tmp/
cp Desktop/Hdept.txt /tmp/
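Loading the copied files into the MySQL tables is not written out in these notes; a minimal sketch, assuming the files are tab-delimited as described above:
LOAD DATA LOCAL INFILE '/tmp/Hemp.txt' INTO TABLE employees FIELDS TERMINATED BY '\t';
LOAD DATA LOCAL INFILE '/tmp/Hdept.txt' INTO TABLE departments FIELDS TERMINATED BY '\t';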
############################################
Create and populate the tables above, then import table employees into HDFS.
**********************************************************************************
hadoop fs -ls
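The default import that produces this directory is not written out in these notes; a minimal sketch, assuming the same connection string and root user used below (with no --target-dir, sqoop creates an HDFS directory named after the table under the user's home directory):
sqoop import \
--connect jdbc:mysql://localhost/sqoop_db \
--table employees \
--username root \
-m 1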
hadoop fs -ls    ##we will see a directory named employees as sqoop will create one with the table name
**********************************************************************************
****************************
Now let's transfer data from the employees table to HDFS into an explicit directory name mentioned by us
(the name we mention should not be an existing directory name)
**********************************************************************************
****************************
sqoop import \
--connect jdbc:mysql://localhost/sqoop_db \
--table employees \
--username root \
--target-dir msqp_dir/emp \
-m 1
hadoop fs -ls
hadoop fs -ls msqp_dir/emp    ##verify the imported files in the target directory
**********************************************************************************
****************************
Now let's transfer all tables from the MySQL database sqoop_db to HDFS; first delete any directories in HDFS
whose names match the table names
**********************************************************************************
****************************
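The command itself is missing from these notes; a minimal sketch using the import-all-tables tool, assuming the same connection string and root user as the other commands:
sqoop import-all-tables \
--connect jdbc:mysql://localhost/sqoop_db \
--username root \
-m 1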
**********************************************************************************
****************
Now let's transfer all rows with salary < 5000 from employees to HDFS
**********************************************************************************
*****************
sqoop import --connect jdbc:mysql://localhost/sqoop_db --table employees \
--where 'salary<5000' \
--target-dir empsal \
--username root -m 1
**********************************************************************************
****************************
Now let's transfer only the empid, name, and deptid columns from employees to HDFS
**********************************************************************************
***************************
sqoop import --connect jdbc:mysql://localhost/sqoop_db --table employees \
--columns "empid,name,deptid" \
--target-dir empcols \
--username root -m 1
**********************************************************************************
****************************
Split the employees data from the MySQL table into 4 files on HDFS while importing with Sqoop
**********************************************************************************
****************************
sqoop import --connect jdbc:mysql://localhost/sqoop_db \
--table employees \
--warehouse-dir sq_dir/empnew \
--username root \
--split-by empid \
-m 4
**********************************************************************************
***************************
Create only the table structure in the Hive database based on the MySQL table
**********************************************************************************
****************************
sqoop create-hive-table \
--connect jdbc:mysql://localhost/sqoop_db \
--table emp1 \
--username root
**********************************************************************************
****************************
Create the Hive table and import the data at the same time from the MySQL table
**********************************************************************************
****************************
sqoop import \
--connect jdbc:mysql://localhost/sqoop_db \
--table emp2 \
--username root \
--hive-import \
-m 1
**********************************************************************************
****************************
Create the Hive table and import the data at the same time from the MySQL table; the default field delimiter
for a Hive import is the non-printing ^A character, so here we mention a delimiter explicitly
**********************************************************************************
****************************
sqoop import \
--connect jdbc:mysql://localhost/sqoop_db \
--table departments \
--username root \
--hive-import \
--fields-terminated-by '@' \
-m 1
**********************************************************************************
****************************
Export data from the Hive table departments to the MySQL table deptx using Sqoop; the target MySQL table must already exist (see the sketch below)
**********************************************************************************
****************************
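The export target table is not defined anywhere in these notes; a minimal sketch of a deptx table in MySQL, assuming it mirrors the departments table created above:
CREATE TABLE deptx
(deptid int,
deptname varchar(15));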
sqoop export \
--connect jdbc:mysql://localhost/sqoop_db \
--table deptx \
--export-dir '/user/hive/warehouse/departments' \
--fields-terminated-by '@' \
--username root