While learning Spark SQL, I hit an error when connecting over JDBC:
Exception in thread "main" java.sql.SQLException: No suitable driver
at java.sql.DriverManager.getDriver(DriverManager.java:315)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions$$anonfun$7.apply(JDBCOptions.scala:84)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions$$anonfun$7.apply(JDBCOptions.scala:84)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions.<init>(JDBCOptions.scala)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions.<init>(JDBCOptions.scala)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:60)
at org.apache.spark.sql.execution.datasources.DataSource.write(DataSource.scala:469)
at org.apache.spark.sql.execution.datasources.SaveIntoDataSourceCommand.run(SaveIntoDataSourceCommand.scala:48)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult$lzycompute(commands.scala:58)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.sideEffectResult(commands.scala:56)
at org.apache.spark.sql.execution.command.ExecutedCommandExec.doExecute(commands.scala:74)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:117)
at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:138)
at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:135)
at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:116)
at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:92)
at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:92)
at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:609)
at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:233)
at org.apache.spark.sql.DataFrameWriter.jdbc(DataFrameWriter.scala:460)
at day03.Test03_jdbc_mysql$.write(Test03_jdbc_mysql.scala:24)
at day03.Test03_jdbc_mysql$.main(Test03_jdbc_mysql.scala:9)
at day03.Test03_jdbc_mysql.main(Test03_jdbc_mysql.scala)
Process finished with exit code 1
The source code is:
package day03

import java.util.Properties

import org.apache.spark.sql.{DataFrame, SparkSession}

object Test03_jdbc_mysql {
  def main(args: Array[String]): Unit = {
    write()
  }

  def write(): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("readFile").getOrCreate()
    // Read a pipe-separated CSV file that has a header row
    val frame: DataFrame = spark.read.option("sep", "|").option("header", "true").csv("data/student.csv")
    val url = "jdbc:mysql://localhost:3306/first?"
    val table = "student"
    val prop = new Properties()
    prop.setProperty("user", "root")
    prop.setProperty("password", "123456")
    // When saving, call jdbc on the DataFrame's writer
    frame.write.jdbc(url, table, prop)
    spark.stop()
  }
}
Cause of the error: the MySQL JDBC driver had not been added to the project, so DriverManager could not find a driver matching the jdbc:mysql:// URL.
The fix is to add the MySQL driver dependency, with a version matching your MySQL server, to pom.xml.
After that, rerunning the program succeeds.