su hadoop
cd /usr/local/service/hive
#!/bin/bash
MAXROW=1000000  # number of rows to generate
for ((i = 0; i < $MAXROW; i++))
do
    echo $RANDOM, \"$RANDOM\"
done
chmod +x gen_data.sh
./gen_data.sh > hive_test.data
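Before uploading, you can optionally sanity-check the generated file, for example by previewing a few rows and counting the lines (the count should match MAXROW):

head -n 5 hive_test.data
wc -l hive_test.data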
hdfs dfs -put ./hive_test.data /${hdfspath}
hdfs dfs -put ./hive_test.data cosn://${bucketname}/
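If you want to confirm that the file landed in the target location before loading it into Hive, a quick listing works for both the HDFS path and the COS path, for example:

hdfs dfs -ls /${hdfspath}
hdfs dfs -ls cosn://${bucketname}/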
hive
hive> show databases;
OK
default
Time taken: 0.26 seconds, Fetched: 1 row(s)
hive> create database if not exists test;
OK
Time taken: 0.176 seconds
hive> use test;
OK
Time taken: 0.176 seconds
hive> create table hive_test (a int, b string)
hive> ROW FORMAT DELIMITED FIELDS TERMINATED BY ','; -- create the table hive_test with ',' as the column delimiter
OK
Time taken: 0.204 seconds
hive> show tables;
OK
hive_test
Time taken: 0.176 seconds, Fetched: 1 row(s)
hive> load data inpath "/${hdfspath}/hive_test.data" into table hive_test;
hive> load data inpath "cosn://${bucketname}/hive_test.data" into table hive_test;
hive> load data local inpath "/${localpath}/hive_test.data" into table hive_test;
hive> select * from hive_test limit 10;
OK
30847 "31583"
14887 "32053"
19741 "16590"
8104 "20321"
29030 "32724"
27274 "5231"
10028 "22594"
924 "32569"
10603 "27927"
4018 "30518"
Time taken: 2.133 seconds, Fetched: 10 row(s)
hive> select count(*) from hive_test;
OK
1000000
Time taken: 18.504 seconds, Fetched: 1 row(s)
hive> drop table if exists hive_test;
Moved: 'hdfs://HDFS/usr/hive/warehouse/hive_test' to trash at: hdfs://HDFS/user/hadoop/.Trash/Current
OK
Time taken: 2.327 seconds
hive> drop database if exists test;
OK
Time taken: 0.531 seconds