Can anyone explain how to export a partitioned table from Hive to a MySQL database?
And how to import from MySQL into a partitioned Hive table?
I have read the documents I found on Google, but I am not sure about the latest techniques that can be used.
Thanks
First, create the source table in MySQL:

CREATE TABLE `mon2`
(`id` int, `name` varchar(43), `age` int, `sex` varchar(334));

Sample data (abc.csv):
1,mahesh,23,m
2,ramesh,32,m
3,prerna,43,f
4,jitu,23,m
5,sandip,32,m
6,gps,43,f
Load the CSV into the table. Note that the source command runs SQL scripts, not CSV files, so use LOAD DATA instead (you may need to start the client with --local-infile=1):

mysql> LOAD DATA LOCAL INFILE 'location_of_your_csv/abc.csv'
       INTO TABLE mon2
       FIELDS TERMINATED BY ','
       LINES TERMINATED BY '\n';
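A quick optional check that the rows landed:

mysql> select * from mon2;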
Importing from MySQL into a partitioned Hive table is the easier direction: filter the rows for one partition in the query, and let Sqoop load them into a static partition via --hive-partition-key and --hive-partition-value:

sqoop import \
--connect jdbc:mysql://localhost:3306/apr \
--username root \
--password root \
-e "select id, name, age from mon2 where sex='m' and \$CONDITIONS" \
--target-dir /user/hive/warehouse/hive_part \
--split-by id \
--hive-overwrite \
--hive-import \
--create-hive-table \
--hive-partition-key sex \
--hive-partition-value 'm' \
--fields-terminated-by ',' \
--hive-table mar.hive_part \
--direct

If --direct (the mysqldump fast path) complains about the free-form query, simply drop it; it is only a performance optimization.
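Before moving on, it is worth verifying the import from the Hive side (standard Hive commands, using the database and table names from the import above):

hive> show partitions mar.hive_part;
hive> select * from mar.hive_part limit 5;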
For the opposite direction, exporting a partitioned Hive table to MySQL, first stage the data in a plain Hive table:

create table hive_temp
(id int, name string, age int, gender string)
row format delimited fields terminated by ',';
load data local inpath '/home/zicone/Documents/pig_to_hbase/stack.csv' into table hive_temp;
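A quick optional check that the staging load worked:

hive> select * from hive_temp limit 3;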
Then create the partitioned table and populate the gender='m' partition from the staging table:

create table hive_part1
(id int, name string, age int)
partitioned by (gender string)
row format delimited fields terminated by ',';

-- optional: the insert overwrite below creates the partition automatically
alter table hive_part1 add partition(gender='m');
insert overwrite table hive_part1 partition(gender='m')
select id, name, age from hive_temp where gender='m';
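The export step below reads the partition's HDFS directory directly, so confirm that the partition exists and note its location (describe formatted prints the path; adjust the export directory if your warehouse layout differs):

hive> show partitions hive_part1;
hive> describe formatted hive_part1 partition(gender='m');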
Create a matching table in MySQL to receive the export:

mysql> create table mon3 like mon2;
The data files under the partition directory contain only id, name, and age; the partition column gender exists only in the directory name, not in the files. Tell Sqoop which target columns those fields map to with --columns, or the export will fail trying to parse a fourth field:

sqoop export \
--connect jdbc:mysql://localhost:3306/apr \
--username root \
--password root \
--table mon3 \
--columns id,name,age \
--export-dir /user/hive/warehouse/mar.db/hive_part1/gender=m \
--input-fields-terminated-by ',' \
-m 1

(Adjust --export-dir if hive_part1 was not created in the mar database; the describe formatted output above shows the actual location.)
Now go to the MySQL terminal and run:

mysql> select * from mon3;

The sex column will be NULL for the exported rows, since the partition value lives in the directory name rather than in the data files. Hope it works for you :)