ScalaConn
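A small utility object that centralizes the connection plumbing for a Spark-based DAO layer: a shared SparkSession, JDBC readers that load tables from several MySQL databases into DataFrames, date and timestamp helpers, and an HBase helper that scans a table in reverse to fetch the most recent value of a tracked column.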
package DAO

import java.sql._
import java.text.SimpleDateFormat
import java.util.{Calendar, Date, Properties}

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.{HBaseConfiguration, TableName}
import org.apache.hadoop.hbase.client._
import org.apache.hadoop.hbase.filter.PageFilter
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.slf4j.LoggerFactory

object ScalaConn {
  val LOG = LoggerFactory.getLogger(getClass)

  val driver = "com.mysql.jdbc.Driver"
  val jdbcUrl = "jdbc:mysql://192.168.100.233:3306/sp2p628"
  val MYSQL_USERNAME: String = "root"
  val MYSQL_PWD: String = "dzwang**"
  val MYSQL_CONNECTION_URL: String = "jdbc:mysql://192.168.100.233:3306/sp2p628"
  val warehouseLocation = "file:${system:user.dir}/spark-warehouse"

  // Shared SparkSession; switch master to spark://192.168.100.110:7077 to run on the cluster.
  val spark = SparkSession
    .builder()
    .master("local[*]")
    .appName("SparkSQL")
    .config("spark.executor.memory", "2g")
    .config("spark.sql.warehouse.dir", "file:///D:/BigData/spark-warehouse")
    .getOrCreate()

  /** Loads the MySQL JDBC driver; the original statement/query code is kept for reference. */
  def getJdbcConn(): Unit = {
    try {
      Class.forName(driver)
      // val connection = DriverManager.getConnection(jdbcUrl, MYSQL_USERNAME, MYSQL_PWD)
      // val statement = connection.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY)
      // val resultSet = statement.executeQuery("select name, password from scala_t")
      // while (resultSet.next()) {
      //   val name = resultSet.getString("name")
      //   val password = resultSet.getString("password")
      // }
    } catch {
      case e: Exception => LOG.error(e.toString)
    }
  }

  /** Reads one table of the given MySQL database into a DataFrame. */
  private def jdbcDataFrame(url: String, tableName: String): DataFrame = {
    val property = new Properties()
    property.put("user", "root")
    property.put("password", "dzwang**")
    spark.read.jdbc(url, tableName, property)
  }

  def getP2PDataFrame(tableName: String): DataFrame =
    jdbcDataFrame("jdbc:mysql://192.168.100.233:3306/sp2p628", tableName)

  def getHadoopDataFrame(tableName: String): DataFrame =
    jdbcDataFrame("jdbc:mysql://192.168.55.218:3306/hadoop", tableName)

  def getHadoopFtoulanalyDataFrame(tableName: String): DataFrame =
    jdbcDataFrame("jdbc:mysql://192.168.55.218:3306/ftoulanaly", tableName)

  def getHadoopFtcrawlerDataFrame(tableName: String): DataFrame =
    jdbcDataFrame("jdbc:mysql://192.168.55.218:3306/ftcrawler", tableName)

  def getHadoopDedetestDataFrame(tableName: String): DataFrame =
    jdbcDataFrame("jdbc:mysql://192.168.55.218:3306/dedetest", tableName)

  def getShopDataFrame(tableName: String): DataFrame =
    jdbcDataFrame("jdbc:mysql://192.168.100.239:3306/ftoul_shop", tableName)

  // Disabled in the original post: map an HBase table onto a DataFrame.
  // def getHbaseDataFrame(tableName: String): DataFrame = {
  //   getHbaseConf.set(TableInputFormat.INPUT_TABLE, tableName)
  //   import spark.implicits._
  //   val hbaseRDD = spark.sparkContext.newAPIHadoopRDD(getHbaseConf,
  //     classOf[TableInputFormat], classOf[ImmutableBytesWritable], classOf[Result])
  //   val hbaseDF = hbaseRDD.map(r => (
  //     Bytes.toString(r._2.getValue(Bytes.toBytes("info"), Bytes.toBytes("customer_id"))),
  //     Bytes.toString(r._2.getValue(Bytes.toBytes("info"), Bytes.toBytes("create_id")))
  //   )).toDF("customer_id", "create_id")
  //   hbaseDF
  // }

  /** Returns the day before the given date (format yyyyMMdd). */
  def getSpecifiedDayBefore(specifiedDay: String): String = shiftDay(specifiedDay, -1)

  /** Returns the day after the given date (format yyyyMMdd). */
  def getSpecifiedDayAfter(specifiedDay: String): String = shiftDay(specifiedDay, 1)

  private def shiftDay(specifiedDay: String, offset: Int): String = {
    val fmt = new SimpleDateFormat("yyyyMMdd")
    val c = Calendar.getInstance()
    try {
      c.setTime(fmt.parse(specifiedDay))
      c.add(Calendar.DATE, offset)
    } catch {
      case e: Exception => e.printStackTrace()
    }
    fmt.format(c.getTime)
  }

  def getHbaseConf: Configuration = {
    val conf: Configuration = HBaseConfiguration.create
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.set("spark.executor.memory", "3000m")
    conf.set("hbase.zookeeper.quorum", "192.168.100.110,192.168.100.111,192.168.100.112")
    conf.set("hbase.master", "192.168.100.110:16020")
    conf.set("hbase.rootdir", "hdfs://192.168.100.110:9000/hbase")
    conf
  }

  /** Parses "yyyy-MM-dd HH:mm:ss" into a Timestamp; falls back to the current time on failure. */
  def GetStampByTime(time: String): Timestamp = parseStamp(time, "yyyy-MM-dd HH:mm:ss")

  /** Parses "yyyyMMddHHmmss" into a Timestamp; falls back to the current time on failure. */
  def GetStampByTime1(time: String): Timestamp = parseStamp(time, "yyyyMMddHHmmss")

  private def parseStamp(time: String, pattern: String): Timestamp = {
    var stamp = new Timestamp(System.currentTimeMillis)
    try {
      stamp = new Timestamp(new SimpleDateFormat(pattern).parse(time).getTime)
    } catch {
      case e: Exception => LOG.error(e.toString)
    }
    stamp
  }

  /** Returns today's date shifted by i days, formatted as yyyyMMdd. */
  def evaluate(i: Int): String = {
    val date = new Date()
    val sf = new SimpleDateFormat("yyyyMMdd")
    val time: Long = date.getTime / 1000 + 60L * 60 * 24 * i // shift in seconds
    date.setTime(time * 1000) // back to milliseconds
    sf.format(date)
  }

  /** Returns the current date shifted by i days; fmt defaults to "yyyy-MM-dd HH:mm:ss" when empty. */
  def getYesterday(i: Int, fmt: String): String = {
    val ft = if (fmt.isEmpty) "yyyy-MM-dd HH:mm:ss" else fmt
    val dateFormat = new SimpleDateFormat(ft)
    val cal = Calendar.getInstance()
    cal.add(Calendar.DATE, i)
    dateFormat.format(cal.getTime)
  }

  /** Scans the given HBase table in reverse and returns the newest value of colFamily:dt,
    * or "2014-12-01 00:00:00" when no value is found. */
  def getMaxDate(conf: Configuration, hconn: Connection, table: Table, colFamily: String, dt: String): String = {
    var maxDate: String = "2014-12-01 00:00:00"
    var results: ResultScanner = null
    // val table: Table = hconn.getTable(TableName.valueOf(tableName))
    try {
      val scan = new Scan
      scan.setReversed(true) // newest row key first
      scan.setMaxVersions()
      scan.setMaxResultSize(1)
      scan.setFilter(new PageFilter(1)) // only the first (latest) row is needed
      scan.addFamily(Bytes.toBytes(colFamily))
      scan.addColumn(Bytes.toBytes(colFamily), Bytes.toBytes(dt))
      results = table.getScanner(scan)
      val first = results.next()
      if (first != null) {
        maxDate = Bytes.toString(first.getValue(Bytes.toBytes(colFamily), Bytes.toBytes(dt)))
      }
      if (maxDate == null || maxDate.isEmpty || maxDate == "null") {
        maxDate = "2014-12-01 00:00:00"
      }
      LOG.info("maxDate=" + maxDate)
    } catch {
      case e: Exception => LOG.error(e.toString)
    } finally {
      try {
        if (results != null) results.close()
      } catch {
        case e: Exception => LOG.error(e.toString)
      }
    }
    maxDate
  }
}