# Seed MySQL with the demo database/table used by every Sqoop example below.
# NOTE: the original paste used typographic quotes (‘…‘), which MySQL rejects;
# they must be plain ASCII single quotes. int(4) display width is deprecated
# in MySQL 8 — plain INT is equivalent for storage.
$ mysql -uroot -p000000
mysql> create database company;
mysql> create table company.staff(
    ->   id int primary key not null auto_increment,
    ->   name varchar(255),
    ->   sex varchar(255));
mysql> insert into company.staff(name, sex) values('Thomas', 'Male');
mysql> insert into company.staff(name, sex) values('Catalina', 'FeMale');
# Full-table import: MySQL company.staff -> HDFS /user/company,
# single mapper, tab-delimited output. --delete-target-dir makes the
# command re-runnable (removes the target dir if it already exists).
# The original line had a stray "\ " mid-line (a collapsed continuation),
# which the shell would pass to sqoop as a bogus single-space argument.
$ bin/sqoop import \
  --connect jdbc:mysql://hadoop102:3306/company \
  --username root \
  --password 000000 \
  --table staff \
  --target-dir /user/company \
  --delete-target-dir \
  --num-mappers 1 \
  --fields-terminated-by "\t"
# Free-form query import. Two fixes vs. the original:
#  - the query must be in plain ASCII single quotes (not typographic ‘…‘),
#    so the shell passes $CONDITIONS through literally for Sqoop to substitute;
#  - the query must NOT end with ';' — Sqoop rejects a trailing semicolon
#    when it wraps the statement for split generation.
# $CONDITIONS is mandatory whenever --query is used.
$ bin/sqoop import \
  --connect jdbc:mysql://hadoop102:3306/company \
  --username root \
  --password 000000 \
  --target-dir /user/company \
  --delete-target-dir \
  --num-mappers 1 \
  --fields-terminated-by "\t" \
  --query 'select name,sex from staff where id <= 1 and $CONDITIONS'
# Column-projection import: only id and sex from staff.
# (Fixed the collapsed "\ " continuation from the original paste.)
$ bin/sqoop import \
  --connect jdbc:mysql://hadoop102:3306/company \
  --username root \
  --password 000000 \
  --target-dir /user/company \
  --delete-target-dir \
  --num-mappers 1 \
  --fields-terminated-by "\t" \
  --columns id,sex \
  --table staff
# Row-filtered import: --where adds a WHERE clause to the generated query.
# (Fixed the collapsed "\ " continuation from the original paste.)
$ bin/sqoop import \
  --connect jdbc:mysql://hadoop102:3306/company \
  --username root \
  --password 000000 \
  --target-dir /user/company \
  --delete-target-dir \
  --num-mappers 1 \
  --fields-terminated-by "\t" \
  --table staff \
  --where "id=1"
# MySQL -> Hive import. Sqoop first lands the data in HDFS, then loads it
# into the Hive table staff_hive; --hive-overwrite replaces existing data.
# (Fixed the collapsed "\ " continuation from the original paste.)
$ bin/sqoop import \
  --connect jdbc:mysql://hadoop102:3306/company \
  --username root \
  --password 000000 \
  --table staff \
  --num-mappers 1 \
  --hive-import \
  --fields-terminated-by "\t" \
  --hive-overwrite \
  --hive-table staff_hive
# MySQL -> HBase import. The original said "--table company", but the
# columns id,name,sex are from the staff table (there is no "company"
# table in this walkthrough) — corrected to staff.
# Row key = id, all columns land in column family "info".
# --split-by is moot with a single mapper but harmless.
$ bin/sqoop import \
  --connect jdbc:mysql://hadoop102:3306/company \
  --username root \
  --password 000000 \
  --table staff \
  --columns "id,name,sex" \
  --column-family "info" \
  --hbase-create-table \
  --hbase-row-key "id" \
  --hbase-table "hbase_company" \
  --num-mappers 1 \
  --split-by id
# Pre-create the HBase table with column family "info" (needed on older
# HBase versions where Sqoop's --hbase-create-table does not work).
# Original was broken: typographic quotes and a missing closing quote
# after the table name.
hbase> create 'hbase_company','info'
# Verify the import by scanning the HBase table (ASCII quotes required).
hbase> scan 'hbase_company'
# Export: Hive warehouse files on HDFS -> MySQL company.staff.
# --input-fields-terminated-by must match the delimiter the data was
# written with ("\t" in the imports above). The target MySQL table must
# already exist. (Fixed the collapsed "\ " continuation.)
$ bin/sqoop export \
  --connect jdbc:mysql://hadoop102:3306/company \
  --username root \
  --password 000000 \
  --table staff \
  --num-mappers 1 \
  --export-dir /user/hive/warehouse/staff_hive \
  --input-fields-terminated-by "\t"
# Same export, driven by a Sqoop options file. The original paste fused
# the `vi` command and the file's contents onto one line; a Sqoop options
# file additionally requires EACH option and EACH value on its own line,
# so the contents are written here with a heredoc in the correct layout.
$ mkdir -p opt
$ cat > opt/job_HDFS2RDBMS.opt <<'EOF'
export
--connect
jdbc:mysql://hadoop102:3306/company
--username
root
--password
000000
--table
staff
--num-mappers
1
--export-dir
/user/hive/warehouse/staff_hive
--input-fields-terminated-by
"\t"
EOF
$ bin/sqoop --options-file opt/job_HDFS2RDBMS.opt
# Source (原文): https://www.cnblogs.com/qiu-hua/p/13401258.html