// Spark SQL external data sources: reading and writing CSV, JSON, Parquet, and JDBC
// Generic pattern: spark.read.format("<format>").load("<path>")
// For JSON there is also a dedicated shortcut reader:
spark.read.json("/usr/file/json/emp.json")
// Read a headerless CSV file, aborting on any malformed record and letting
// Spark infer column types (inferSchema costs an extra pass over the data).
val deptCsv = spark.read
  .format("csv")
  .option("header", "false")       // file has no header row
  .option("mode", "FAILFAST")      // fail immediately on a corrupt record
  .option("inferSchema", "true")   // derive column types from the data
  .load("/usr/file/csv/dept.csv")
deptCsv.show()
// Write df out as tab-separated CSV, replacing the target directory if it exists.
df.write
  .format("csv")
  .mode("overwrite")      // SaveMode.Overwrite: clobber any existing output
  .option("sep", "\t")    // tab as the field separator
  .save("/tmp/csv/dept2")
// Read a JSON file in FAILFAST mode (any corrupt record aborts the read),
// then preview the first 5 rows.
val deptJson = spark.read
  .format("json")
  .option("mode", "FAILFAST")
  .load("/usr/file/json/dept.json")
deptJson.show(5)
// Persist df as JSON, overwriting any previous output at the same path.
df.write
  .format("json")
  .mode("overwrite")
  .save("/tmp/spark/json/dept")
// Read a Parquet file (schema is embedded in the file, so no options are
// needed) and preview the first 5 rows.
val deptParquet = spark.read
  .format("parquet")
  .load("/usr/file/parquet/dept.parquet")
deptParquet.show(5)
// Persist df in Parquet format, replacing any existing output directory.
df.write
  .format("parquet")
  .mode("overwrite")
  .save("/tmp/spark/parquet/dept")
// Read the MySQL `help_keyword` table over JDBC and preview 10 rows.
// NOTE(review): "com.mysql.jdbc.Driver" is the legacy Connector/J 5.x class;
// Connector/J 8+ renamed it to "com.mysql.cj.jdbc.Driver" — confirm which
// connector jar is on the classpath before changing it.
val helpKeywords = spark.read
  .format("jdbc")
  .option("driver", "com.mysql.jdbc.Driver")
  .option("url", "jdbc:mysql://127.0.0.1:3306/mysql")
  .option("dbtable", "help_keyword")
  .option("user", "root")
  .option("password", "root")
  .load()
helpKeywords.show(10)
// Load employee records from JSON.
// (The original had both statements fused onto one line, which does not
// parse as Scala — they are split back into two statements here.)
val df = spark.read.format("json").load("/usr/file/json/emp.json")

// Write the DataFrame into the MySQL `emp` table over JDBC.
// No .mode(...) is set, so the default SaveMode.ErrorIfExists applies:
// the save fails if the `emp` table already exists.
// NOTE(review): no "driver" option is given here (unlike the read example);
// Spark will try to auto-detect the driver from the URL — verify this works
// with the connector jar on the classpath.
df.write
  .format("jdbc")
  .option("url", "jdbc:mysql://127.0.0.1:3306/mysql")
  .option("user", "root")
  .option("password", "root")
  .option("dbtable", "emp")
  .save()