Reading Hive data with HiveContext

import java.io.{File, FileOutputStream, OutputStream}
import java.util.Properties

import org.apache.log4j.{Level, Logger}
import org.apache.spark.sql.Row
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by shuilin on 2017/12/19.
  */
object HiveSourceTest {

  // Output file on the local filesystem; the query result is written
  // here, one row per line.
  val file = new File("E:" + File.separator + "hello.txt")
  var out: OutputStream = new FileOutputStream(file)

  def main(args: Array[String]): Unit = {
    // Silence Spark's INFO/WARN logging so only errors are printed.
    Logger.getLogger("org.apache.spark").setLevel(Level.ERROR)

    val sparkConf = new SparkConf().setAppName("hivetest").setMaster("local")
    val sparkContext = new SparkContext(sparkConf)
    val hiveContext = new HiveContext(sparkContext)

    // Read the SQL statement from sql.properties on the classpath.
    val props = getProperties()
    val sql_text = props.getProperty("sql_text")
    println(sql_text)

    // Run the query against Hive; the result is a DataFrame of Rows.
    val data = hiveContext.sql(sql_text)

    // Equivalent inline version, kept from the original for reference:
    /*
    data.foreach(row => {
      println(row.toString())
      val rowstring = row.toString().getBytes()
      out.write(rowstring)
      out.write("\r\n".getBytes)
      //writeFile("D:\\test.sql", row.toString())
    })
    */

    // Caution: foreach runs inside the executors. Writing through the
    // driver-side OutputStream only works here because the master is
    // "local"; on a real cluster each executor would write to its own
    // local filesystem instead.
    data.foreach(writeFile)

    out.close()
    sparkContext.stop()
  }

  // Load sql.properties from the classpath (e.g. src/main/resources).
  def getProperties(): Properties = {
    val props = new Properties()
    val in = this.getClass.getClassLoader.getResourceAsStream("sql.properties")
    props.load(in)
    props
  }

  // Write one Row as a line of text, terminated by CRLF.
  def writeFile(row: Row): Unit = {
    val rowstring = row.toString()
    out.write(rowstring.getBytes)
    out.write("\r\n".getBytes)
  }
}
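The program expects a sql.properties file on the classpath containing a sql_text key. A minimal sketch of that file (only the key name is taken from the code above; the query itself is a placeholder, substitute your own Hive table):

# src/main/resources/sql.properties
sql_text=select * from default.src limit 10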
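As noted in the comments, foreach executes on the executors, so this pattern only happens to work in local mode. If the result set is small enough to fit in driver memory, a safer variant is to collect the rows to the driver before writing them; a minimal sketch under that assumption, reusing hiveContext, sql_text, and file from above:

// Collect the query result to the driver, then write it out locally.
// Only appropriate for small result sets that fit in driver memory.
val rows = hiveContext.sql(sql_text).collect()
val writer = new java.io.PrintWriter(file)
try {
  rows.foreach(row => writer.println(row.toString()))
} finally {
  writer.close()
}

For large results, writing from the cluster itself (for example via data.rdd.saveAsTextFile on a shared filesystem such as HDFS) is the more idiomatic route.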