Lesson 21: Scala File and XML Operations in Practice, with a Look at the Spark Source Code


1 A look at how files and serialization are used in the Spark source code
2 Scala file and XML operations in practice

Spark relies on Java serialization in a number of places. The serialize and deserialize helpers below, taken from org.apache.spark.util.Utils, are a representative example:
/** Serialize an object using Java serialization */
  def serialize[T](o: T): Array[Byte] = {
    val bos = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(bos)
    oos.writeObject(o)
    oos.close()
    bos.toByteArray
  }

  /** Deserialize an object using Java serialization */
  def deserialize[T](bytes: Array[Byte]): T = {
    val bis = new ByteArrayInputStream(bytes)
    val ois = new ObjectInputStream(bis)
    ois.readObject.asInstanceOf[T]
  }
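A quick way to see these two helpers at work is an in-memory round trip. The sketch below is illustrative only: Person is a hypothetical case class, and serialize/deserialize are assumed to be the methods shown above.

  case class Person(name: String)                        // case classes are Serializable by default

  val bytes: Array[Byte] = serialize(Person("spark"))    // object -> byte array
  val restored = deserialize[Person](bytes)               // byte array -> object
  println(restored.name)                                  // prints "spark"

The practice code below does the same thing, but writes the serialized bytes to a file instead of keeping them in memory, and then reads a local text file and a web page with scala.io.Source.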

 

 

 

 

package com.dt.spark.scala.bascis

import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.io.FileOutputStream
import java.io.FileInputStream
import scala.io.Source

// A simple serializable class. @SerialVersionUID fixes the version id that
// Java serialization embeds in the output.
@SerialVersionUID(99L)
class DTSpark(val name: String) extends Serializable

object HelloFileOpps extends App {
  val dtspark = new DTSpark("spark")

  /** Serialize an object to D:\spark.txt using Java serialization. */
  def serialize[T](o: T): Unit = {
    val oos = new ObjectOutputStream(new FileOutputStream("D:\\spark.txt"))
    oos.writeObject(o)
    oos.close()
  }

  serialize(dtspark)

  /** Deserialize an object from D:\spark.txt using Java serialization. */
  def deserialize[T](): T = {
    val ois = new ObjectInputStream(new FileInputStream("D:\\spark.txt"))
    val o = ois.readObject.asInstanceOf[T]
    ois.close()
    o
  }

  println(deserialize[DTSpark]().name)   // prints "spark"

  // Read a local text file line by line (the file is GBK-encoded).
  for (line <- Source.fromFile("D:\\file.txt", "GBK").getLines()) {
    println(line)
  }

  // Read the whole file into one String.
  println(Source.fromFile("D:\\file.txt", "GBK").mkString)

  // Iterating over a Source directly yields one character at a time.
  for (item <- Source.fromFile("D:\\file.txt", "GBK")) println(item)

  println("================")

  // Read a web page over HTTP.
  println(Source.fromURL("http://spark.apache.org/", "UTF-8").mkString)
}

 

Result:

 

 

spark
生活不止眼前的苟且
还有诗和远方
寻找那片海
生活不止眼前的苟且
还有诗和远方
寻找那片海

(The character-by-character loop then prints each character of the file on its own line; that output is omitted here.)
================
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="utf-8">
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">

  <title>
     Apache Spark&trade; - Lightning-Fast Cluster Computing
   
  </title>

 

 
    <meta name="description" content="Apache Spark is a fast and general engine for big data processing, with built-in modules for streaming, SQL, machine learning and graph processing.">
 

  <!-- Bootstrap core CSS -->
  <link href="/css/cerulean.min.css" rel="stylesheet">
  <link href="/css/custom.css" rel="stylesheet">

  <!-- Code highlighter CSS -->
  <link href="/css/pygments-default.css" rel="stylesheet">

  <script type="text/javascript">

 

...... (remaining HTML output omitted)
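The lesson outline also mentions XML operations, which the code above does not cover. Here is a minimal sketch using scala.xml (bundled with Scala 2.10; on later versions it is the separate scala-xml module):

  import scala.xml._

  // XML literals produce scala.xml.Elem values directly.
  val course = <course><name>Spark</name><lesson>21</lesson></course>

  // \ selects direct children, \\ searches the whole subtree.
  println((course \ "name").text)      // Spark
  println((course \\ "lesson").text)   // 21

  // Parse XML from a String; XML.loadFile works the same way for files.
  val conf = XML.loadString("<conf><master>local[*]</master></conf>")
  println((conf \ "master").text)      // local[*]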

 

 

 
