Maven依赖
<!-- Add the jar that gives Spark support for Hive
     (Scala 2.11 build, matching Spark 2.1.1). -->
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-hive_2.11</artifactId>
    <version>2.1.1</version>
</dependency>
hive-site.xml配置文件
<?xml version="1.0" ?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
    <property>
        <name>javax.jdo.option.ConnectionURL</name>
        <!-- This is the database where the Hive metastore metadata lives -->
        <value>jdbc:mysql://zjj101:3306/hive?createDatabaseIfNotExist=true</value>
        <description>JDBC connect string for a JDBC metastore</description>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionDriverName</name>
        <value>com.mysql.jdbc.Driver</value>
        <description>Driver class name for a JDBC metastore</description>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionUserName</name>
        <value>root</value>
        <description>username to use against metastore database</description>
    </property>
    <property>
        <name>javax.jdo.option.ConnectionPassword</name>
        <value>root</value>
        <description>password to use against metastore database</description>
    </property>
</configuration>
代码
package com.hive

import org.apache.spark.sql.SparkSession

/**
 * Reads a table from an external Hive metastore via Spark SQL.
 *
 * The metastore connection settings are picked up from the
 * hive-site.xml on the classpath.
 */
object HiveRead {

  def main(args: Array[String]): Unit = {
    // Enable support for the external Hive metastore on a local session.
    val session: SparkSession =
      SparkSession
        .builder()
        .master("local[*]")
        .appName("HiveRead")
        .enableHiveSupport()
        .getOrCreate()

    // session.sql("show databases").show
    session.sql("use gmall").show
    // session.sql("show tables").show
    session.sql("select * from emp").show

    session.close()
  }
}
控制台打印结果
++
||
++
+---+----+
| id|name|
+---+----+
|  1| aaa|
|  2| bbb|
|  3| ccc|
+---+----+