You just need to make the following changes to the spark-submit command and your code:
test.py
import sys
from pyspark import SparkContext
from pyspark.sql import SQLContext

sc = SparkContext("local", "Simple App")
sqlContext = SQLContext(sc)

# Database name, table name and output path come from the command line
db_name = sys.argv[1]
table_name = sys.argv[2]
file_name = sys.argv[3]

# Load the SQL Server table over JDBC
df = (sqlContext.read.format("jdbc")
      .option("url", "jdbc:sqlserver://server:port").option("databaseName", db_name)
      .option("driver", "com.microsoft.sqlserver.jdbc.SQLServerDriver")
      .option("dbtable", table_name).option("user", "uid").option("password", "pwd")
      .load())

df.registerTempTable("test")
# Write the DataFrame out as CSV (requires the spark-csv package)
df.write.format("com.databricks.spark.csv").save(file_name)
spark-submit test.py <db_name> <table_name> <file_name>
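Both the SQL Server JDBC driver and the spark-csv package must be available at submit time. Here is a minimal sketch of the full command, assuming a Spark 1.x / Scala 2.10 build and a locally downloaded sqljdbc4.jar; the jar path, package version and the example arguments (mydb, dbo.mytable, /tmp/out_csv) are placeholders to adjust for your environment:

spark-submit \
  --jars /path/to/sqljdbc4.jar \
  --driver-class-path /path/to/sqljdbc4.jar \
  --packages com.databricks:spark-csv_2.10:1.5.0 \
  test.py mydb dbo.mytable /tmp/out_csv

Passing the driver jar via both --jars and --driver-class-path ensures it is visible to the driver as well as the executors; --packages pulls spark-csv from Maven so the com.databricks.spark.csv output format can be resolved.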