代码之家  ›  专栏  ›  技术社区  ›  Fast Chip

从Spark连接到sql数据库

  •  1
  • Fast Chip  · 技术社区  · 6 年前

    scala> import org.apache.spark.sql.SQLContext                                                                                                                                                                      
    import org.apache.spark.sql.SQLContext
    
    scala> val sqlcontext = new org.apache.spark.sql.SQLContext(sc)                                                                                                                                                    
    warning: there was one deprecation warning; re-run with -deprecation for details
    sqlcontext: org.apache.spark.sql.SQLContext = org.apache.spark.sql.SQLContext@2bf4fa1
    
    scala> val dataframe_mysql = sqlcontext.read.format("jdbc").option("url", "jdbc:sqlserver:192.168.103.64/DRE").option("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver").option("dbtable", "NCentralAlerts")
    .option("user", "sqoop").option("password", "hadoop").load()
    java.lang.ClassNotFoundException: com.microsoft.sqlserver.jdbc.SQLServerDriver
      at scala.reflect.internal.util.AbstractFileClassLoader.findClass(AbstractFileClassLoader.scala:62)
      at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
      at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
      at org.apache.spark.sql.execution.datasources.jdbc.DriverRegistry$.register(DriverRegistry.scala:45)
      at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions$$anonfun$6.apply(JDBCOptions.scala:79)
      at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions$$anonfun$6.apply(JDBCOptions.scala:79)
      at scala.Option.foreach(Option.scala:257)
      at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions.<init>(JDBCOptions.scala:79)
      at org.apache.spark.sql.execution.datasources.jdbc.JDBCOptions.<init>(JDBCOptions.scala:35)
      at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:34)
      at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:340)
      at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239)
      at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
      at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:164)
      ... 49 elided
    

    1 回复  |  直到 6 年前
        1
  •  3
  •   Chitral Verma    6 年前

    我从日志中看到，您正在尝试使用 spark-shell 运行此程序。假设你手头已经有 SQL Server JDBC 驱动的 jar 包，启动 spark-shell 时加上参数：

    spark-shell --jars /path/to/driver.jar
    

    这样，该 jar 包就会被添加到类路径中，您就能正常使用该驱动程序了。