package l847164916
import java.util.Properties

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{Row, SQLContext, SaveMode}
import org.apache.spark.sql.types._

/**
 * Full-outer-joins two space-separated rate files into one row per name,
 * filling missing rates with 0.0, and writes the result to MySQL via JDBC.
 *
 * Created by Administrator on 2016/9/2.
 */
object Test {
  def main(args: Array[String]): Unit = {
    val url = "jdbc:mysql://xxxxxxx:3306/hyn_profile"
    val prop = new Properties()
    prop.setProperty("user", "root")
    prop.setProperty("password", "xxxxx")
    prop.setProperty("driver", "com.mysql.jdbc.Driver")

    val conf = new SparkConf().setAppName("test").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    val schema = StructType(
      StructField("name", StringType) ::
      StructField("rate1", DoubleType) ::
      StructField("rate2", DoubleType) :: Nil
    )

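    // Hypothetical sample input (not from the original source), assuming one
    // "<name> <rate>" record per line in each file:
    //   data:  tom 0.5
    //          jack 0.3
    //   data1: tom 0.8
    //          lucy 0.7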
    val rdd1 = sc.textFile("D:/LCH/hellomoto/data").map(_.split(" ")).map(a => (a(0), a(1)))
    val rdd2 = sc.textFile("D:/LCH/hellomoto/data1").map(_.split(" ")).map(a => (a(0), a(1)))

    // 1. Only an outer join produces Option values, for keys missing on one side.
    // 2. getOrElse() supplies a default for values that are missing after the
    //    join; note the type must match the schema (Double here).
    val rdd = rdd1.fullOuterJoin(rdd2).map { case (name, (rate1, rate2)) =>
      Row(name, rate1.map(_.toDouble).getOrElse(0.0), rate2.map(_.toDouble).getOrElse(0.0))
    }
    val df = sqlContext.createDataFrame(rdd, schema)
    df.write.mode(SaveMode.Overwrite).jdbc(url, "test", prop)
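
    // Optional sanity check (a sketch, not in the original): read the freshly
    // written table back over the same JDBC connection and print a few rows.
    // Assumes the MySQL instance at `url` is reachable with these credentials.
    sqlContext.read.jdbc(url, "test", prop).show()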

    /*
    (2) Alternative: implicit conversion to a DataFrame.
    Note: rate1/rate2 remain Option[String] here, so the columns are written
    as nullable strings rather than doubles.
    import sqlContext.implicits._
    val df = rdd1.fullOuterJoin(rdd2)
      .map { case (name, (rate1, rate2)) => (name, rate1, rate2) }
      .toDF("name", "rate1", "rate2")
    df.write.mode(SaveMode.Overwrite).jdbc(url, "test", prop)
    */
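
    /*
    (3) On Spark 2.x the SQLContext entry point above is superseded by
    SparkSession; a hypothetical equivalent sketch (not from the original
    source, untested here):
    val spark = org.apache.spark.sql.SparkSession.builder()
      .appName("test").master("local").getOrCreate()
    val df = spark.createDataFrame(rdd, schema)
    df.write.mode(SaveMode.Overwrite).jdbc(url, "test", prop)
    */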

    sc.stop()
  }
}