spark 2.1 BlockManagerInfo
来源:互联网 发布:网络直销公司排名 编辑:程序博客网 时间:2024/06/06 04:06
/**
 * Book-keeping record for a single BlockManager, keyed by its [[BlockManagerId]].
 * Tracks the blocks that manager reports, how much of its memory budget remains,
 * and the last time it was heard from. NOTE(review): presumably held by the
 * master-side endpoint (it keeps an RpcEndpointRef back to the slave) — confirm
 * against the enclosing BlockManagerMasterEndpoint.
 *
 * @param blockManagerId identity (executor id + host:port) of the tracked manager
 * @param timeMs         creation timestamp; also the initial "last seen" time
 * @param maxMem         total memory budget; `_remainingMem` starts from this
 * @param slaveEndpoint  RPC handle to the remote BlockManager
 */
private[spark] class BlockManagerInfo(
    val blockManagerId: BlockManagerId,
    timeMs: Long,
    val maxMem: Long,
    val slaveEndpoint: RpcEndpointRef)
  extends Logging {

  // Last heartbeat/update time; refreshed by updateLastSeenMs().
  private var _lastSeenMs: Long = timeMs
  // Memory still available on this manager; decremented/incremented as
  // in-memory blocks are added and removed below.
  private var _remainingMem: Long = maxMem

  // Mapping from block id to its status.
  private val _blocks = new JHashMap[BlockId, BlockStatus]

  // Cached blocks held by this BlockManager. This does not include broadcast blocks.
  private val _cachedBlocks = new mutable.HashSet[BlockId]

  /** Status of `blockId` on this manager, or None if it is not tracked here. */
  def getStatus(blockId: BlockId): Option[BlockStatus] = Option(_blocks.get(blockId))

  /** Record that this manager was just heard from. */
  def updateLastSeenMs() {
    _lastSeenMs = System.currentTimeMillis()
  }

  /**
   * Record a block report from the slave: add/replace the block's status, or drop
   * it when `storageLevel` is not valid, keeping `_remainingMem` and
   * `_cachedBlocks` consistent with the change.
   *
   * @param blockId      block being reported
   * @param storageLevel new level; an invalid level means "block removed"
   * @param memSize      bytes the block occupies in memory (or was dropped from)
   * @param diskSize     bytes the block occupies on disk (or was dropped to)
   */
  def updateBlockInfo(
      blockId: BlockId,
      storageLevel: StorageLevel,
      memSize: Long,
      diskSize: Long) {

    updateLastSeenMs()

    if (_blocks.containsKey(blockId)) {
      // The block exists on the slave already.
      val blockStatus: BlockStatus = _blocks.get(blockId)
      val originalLevel: StorageLevel = blockStatus.storageLevel
      val originalMemSize: Long = blockStatus.memSize

      // Return the old in-memory footprint before re-charging it below (or not,
      // if the block is leaving memory).
      if (originalLevel.useMemory) {
        _remainingMem += originalMemSize
      }
    }

    if (storageLevel.isValid) {
      /* isValid means it is either stored in-memory or on-disk.
       * The memSize here indicates the data size in or dropped from memory,
       * externalBlockStoreSize here indicates the data size in or dropped from externalBlockStore,
       * and the diskSize here indicates the data size in or dropped to disk.
       * They can be both larger than 0, when a block is dropped from memory to disk.
       * Therefore, a safe way to set BlockStatus is to set its info in accurate modes. */
      var blockStatus: BlockStatus = null
      if (storageLevel.useMemory) {
        blockStatus = BlockStatus(storageLevel, memSize = memSize, diskSize = 0)
        _blocks.put(blockId, blockStatus)
        _remainingMem -= memSize
        logInfo("Added %s in memory on %s (size: %s, free: %s)".format(
          blockId, blockManagerId.hostPort, Utils.bytesToString(memSize),
          Utils.bytesToString(_remainingMem)))
      }
      if (storageLevel.useDisk) {
        // NOTE: when the level uses both memory and disk, this second put
        // overwrites the memory status above, so the stored BlockStatus ends up
        // with memSize = 0 and only diskSize set. _remainingMem was still
        // charged by the memory branch above.
        blockStatus = BlockStatus(storageLevel, memSize = 0, diskSize = diskSize)
        _blocks.put(blockId, blockStatus)
        logInfo("Added %s on disk on %s (size: %s)".format(
          blockId, blockManagerId.hostPort, Utils.bytesToString(diskSize)))
      }
      // Track as "cached" only non-broadcast blocks that are actually stored.
      if (!blockId.isBroadcast && blockStatus.isCached) {
        _cachedBlocks += blockId
      }
    } else if (_blocks.containsKey(blockId)) {
      // If isValid is not true, drop the block.
      val blockStatus: BlockStatus = _blocks.get(blockId)
      _blocks.remove(blockId)
      _cachedBlocks -= blockId
      if (blockStatus.storageLevel.useMemory) {
        logInfo("Removed %s on %s in memory (size: %s, free: %s)".format(
          blockId, blockManagerId.hostPort, Utils.bytesToString(blockStatus.memSize),
          Utils.bytesToString(_remainingMem)))
      }
      if (blockStatus.storageLevel.useDisk) {
        logInfo("Removed %s on %s on disk (size: %s)".format(
          blockId, blockManagerId.hostPort, Utils.bytesToString(blockStatus.diskSize)))
      }
    }
  }

  /**
   * Forget a block entirely, returning its in-memory footprint to
   * `_remainingMem`. Safe to call for blocks this manager never reported.
   */
  def removeBlock(blockId: BlockId) {
    if (_blocks.containsKey(blockId)) {
      _remainingMem += _blocks.get(blockId).memSize
      _blocks.remove(blockId)
    }
    _cachedBlocks -= blockId
  }

  /** Memory still available on this manager. */
  def remainingMem: Long = _remainingMem

  /** Timestamp of the most recent update from this manager. */
  def lastSeenMs: Long = _lastSeenMs

  /** All tracked blocks and their statuses. Exposes the internal mutable map. */
  def blocks: JHashMap[BlockId, BlockStatus] = _blocks

  // This does not include broadcast blocks.
  def cachedBlocks: collection.Set[BlockId] = _cachedBlocks

  override def toString: String = "BlockManagerInfo " + timeMs + " " + _remainingMem

  /**
   * Drop all block records. NOTE(review): does not reset `_remainingMem` or
   * `_cachedBlocks` — presumably callers discard this instance afterwards; verify.
   */
  def clear() {
    _blocks.clear()
  }
}
0 0
- spark 2.1 BlockManagerInfo
- spark 2.1 spark.yarn.services
- Spark 2.1.0 -- Spark Streaming Programming Guide
- spark 2.1 spark-shell Startup Process
- spark 2.1 spark executor topology information
- spark-2.1.0安装
- spark(2.1.0)
- Spark 2.1 structured streaming
- Spark 2.1 CallSite
- spark 2.1 ConfigProvider
- Spark ML 2.1 --Pipelines
- spark-2.1.0
- spark 2.1 SparkContext postEnvironmentUpdate
- spark 2.1 StorageLevel
- spark 2.1 BlockManagerId
- spark 2.1 BlockManagerMasterEndpoint
- spark 2.1 BlockManagerSlaveEndpoint
- spark 2.1 Memory Store
- Socket+SparkStreaing+Redis操作代码
- ubuntu上通过命令行导出mysql数据库文件到widows系统上
- 数据结构——线性表——顺序存储结构——C++实现线性表
- logstash日志分析的配置和使用
- [iOS 使用sqllite3.0] 创建表 , 增/删/查/改 常用命令语句
- spark 2.1 BlockManagerInfo
- Java Web项目部署到阿里云服务器
- 如何使用Android Studio把自己的Android library分享到jCenter和Maven Central
- gdb调试复习整理及coredump的设置与解析
- LINUX常用命令
- 项目启动时 org.drools.task报错
- 关于风动草的shader
- iTunesConnect-提交应用屏幕快照图片尺寸
- 从零开始编写深度学习库(二)FullyconnecteLayer CPU编写