under the /usr/local/service/ path.
[root@172 ~]# su hadoop
[hadoop@172 ~]$ cd /usr/local/service/hive
[hadoop@172 hive]$ hive
hive> create database hive_from_sqoop;
OK
Time taken: 0.167 seconds
[hadoop@172 hive]$ cd /usr/local/service/sqoop
[hadoop@172 sqoop]$ bin/sqoop-import --connect jdbc:mysql://$mysqlIP/test --username root -P --table sqoop_test_back --hive-database hive_from_sqoop --hive-import --hive-table hive_from_sqoop
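If the source table happens to have no primary key, sqoop-import cannot decide how to split the rows among its mappers and will abort; in that case you can either specify a split column with --split-by or fall back to a single mapper with -m 1. A minimal sketch reusing the same $mysqlIP placeholder and table names as above:
[hadoop@172 sqoop]$ bin/sqoop-import --connect jdbc:mysql://$mysqlIP/test --username root -P --table sqoop_test_back --hive-database hive_from_sqoop --hive-import --hive-table hive_from_sqoop -m 1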
hive> select * from hive_from_sqoop;
OK
1    first    2018-07-03 16:07:46.0    spark
2    second   2018-07-03 15:30:57.0    mr
3    third    2018-07-03 15:31:07.0    yarn
4    forth    2018-07-03 15:39:38.0    hbase
5    fifth    2018-07-03 16:02:29.0    hive
6    sixth    2018-07-03 16:09:58.0    sqoop
Time taken: 1.245 seconds, Fetched: 6 row(s)
[root@172 ~]# su hadoop
[hadoop@172 ~]$ cd /usr/local/service/hive
#!/bin/bash
MAXROW=1000000    # Number of rows of data to generate
for ((i = 0; i < $MAXROW; i++))
do
    echo $RANDOM, \"$RANDOM\"
done
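Assuming the script is saved as gen_data.sh in the current directory, make it executable before running it:
[hadoop@172 hive]$ chmod +x gen_data.sh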
[hadoop@172 hive]$ ./gen_data.sh > hive_test.data
[hadoop@172 hive]$ hdfs dfs -put ./hive_test.data /$hdfspath
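Optionally, confirm that the file landed in HDFS by listing the target directory (output omitted; /$hdfspath is the same placeholder path used above):
[hadoop@172 hive]$ hdfs dfs -ls /$hdfspath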
[hadoop@172 hive]$ bin/hive
hive> create database hive_to_sqoop;    # Create the database hive_to_sqoop
OK
Time taken: 0.176 seconds
hive> use hive_to_sqoop;    # Switch to the database
OK
Time taken: 0.176 seconds
hive> create table hive_test (a int, b string)
hive> ROW FORMAT DELIMITED FIELDS TERMINATED BY ',';    # Create the table hive_test with ',' as the field delimiter
OK
Time taken: 0.204 seconds
hive> load data inpath "/$hdfspath/hive_test.data" into table hive_test;    # Load the data
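Before exporting, you can run a quick sanity check on the row count in Hive; it should match the MAXROW value used when generating the data (output omitted here):
hive> select count(*) from hive_test;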
Use the quit command to exit the Hive data warehouse. Then connect to the relational database and create the corresponding table:
[hadoop@172 hive]$ mysql -h $mysqlIP -p
Enter password:
mysql> create table table_from_hive (a int,b varchar(255));
[hadoop@172 hive]$ cd ../sqoop/bin
[hadoop@172 bin]$ ./sqoop-export --connect jdbc:mysql://$mysqlIP/test --username root -P --table table_from_hive --export-dir /usr/hive/warehouse/hive_to_sqoop.db/hive_test
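The --export-dir form reads the raw data files under the Hive warehouse directory, so the field delimiter of the Hive table must match the one Sqoop expects when parsing the input. The hive_test table above was created with ',', which matches Sqoop's default; if the table had used a different delimiter such as '\t', you could declare it explicitly with --input-fields-terminated-by. A hedged sketch reusing the same placeholders:
[hadoop@172 bin]$ ./sqoop-export --connect jdbc:mysql://$mysqlIP/test --username root -P --table table_from_hive --export-dir /usr/hive/warehouse/hive_to_sqoop.db/hive_test --input-fields-terminated-by '\t'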
[hadoop@172 hive]$ cd ../sqoop/bin
[hadoop@172 bin]$ ./sqoop-export --connect jdbc:mysql://$mysqlIP/test --username root -P --table table_from_hive --hcatalog-database hive_to_sqoop --hcatalog-table hive_test
[hadoop@172 hive]$ mysql -h $mysqlIP -p    # Connect to MySQL
Enter password:
mysql> use test;
Database changed
mysql> select count(*) from table_from_hive;    # The table now contains 1,000,000 rows
+----------+
| count(*) |
+----------+
|  1000000 |
+----------+
1 row in set (0.03 sec)
mysql> select * from table_from_hive limit 10;    # View the first 10 rows of the table
+-------+----------+
| a     | b        |
+-------+----------+
| 28523 |  "3394"  |
| 31065 |  "24583" |
|   399 |  "23629" |
| 18779 |  "8377"  |
| 25376 |  "30798" |
| 20234 |  "22048" |
| 30744 |  "32753" |
| 21423 |  "6117"  |
| 26867 |  "16787" |
| 18526 |  "5856"  |
+-------+----------+
10 rows in set (0.00 sec)
[hadoop@172 bin]$ ./sqoop-export --help
[root@172 ~]# su hadoop
[hadoop@172 ~]$ cd /usr/local/service/hive
[hadoop@172 hive]$ hive
hive> use hive_to_sqoop;
OK
Time taken: 0.013 seconds
hive> create table if not exists orc_test(a int, b string) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' stored as orc;
hive> show create table orc_test;
OK
CREATE TABLE `orc_test`(
  `a` int,
  `b` string)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
WITH SERDEPROPERTIES (
  'field.delim'=',',
  'serialization.format'=',')
STORED AS INPUTFORMAT
  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION
  'hdfs://HDFS2789/usr/hive/warehouse/hive_to_sqoop.db/orc_test'
TBLPROPERTIES (
  'COLUMN_STATS_ACCURATE'='{\"BASIC_STATS\":\"true\"}',
  'numFiles'='0',
  'numRows'='0',
  'rawDataSize'='0',
  'totalSize'='0',
  'transient_lastDdlTime'='1533563293')
Time taken: 0.041 seconds, Fetched: 21 row(s)
hive> insert into table orc_test select * from hive_test;
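You can confirm that all rows were copied into the ORC table (output omitted):
hive> select count(*) from orc_test;
As an aside, the create-and-insert steps above could also be done in a single CREATE TABLE ... AS SELECT statement; the table name orc_test_ctas below is purely illustrative:
hive> create table orc_test_ctas stored as orc as select * from hive_test;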
You can use the select statement to view the data in the table. Then connect to the relational database and create the corresponding table:
[hadoop@172 hive]$ mysql -h $mysqlIP -p
Enter password:
mysql> create table table_from_orc (a int,b varchar(255));
[hadoop@172 hive]$ cd ../sqoop/bin
[hadoop@172 bin]$ ./sqoop-export --connect jdbc:mysql://$mysqlIP/test --username root -P --table table_from_orc --hcatalog-database hive_to_sqoop --hcatalog-table orc_test
mysql> select count(*) from table_from_orc;
+----------+
| count(*) |
+----------+
|  1000000 |
+----------+
1 row in set (0.24 sec)
mysql> select * from table_from_orc limit 10;
+-------+----------+
| a     | b        |
+-------+----------+
| 28523 |  "3394"  |
| 31065 |  "24583" |
|   399 |  "23629" |
| 18779 |  "8377"  |
| 25376 |  "30798" |
| 20234 |  "22048" |
| 30744 |  "32753" |
| 21423 |  "6117"  |
| 26867 |  "16787" |
| 18526 |  "5856"  |
+-------+----------+
10 rows in set (0.00 sec)