Writing a Heka Plugin

Heka's message flow is as follows:
input ---> splitter ---> decoder ---> router ---> filter ---> output
An output can be configured with an encoder, and the encoder determines what the emitted message looks like.
Heka ships with a handy built-in encoder called RstEncoder, which renders a message as key: value pairs.
Below is an example of output produced in RstEncoder format:
:Timestamp: 2016-02-15 07:11:29.232551585 +0000 UTC
:Type: TestTcpInput
:Hostname: [ip]:29093
:Pid: 0
:Uuid: d178f24b-d967-46ab-801e-d3383b8ff5bf
:Logger: TestTcpInput
:Payload: sdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdfsdf
:EnvVersion: 
:Severity: 7
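
Every encoder plugin has the same basic shape: it implements Init and Encode and registers a constructor with pipeline.RegisterPlugin. The skeleton below is a minimal sketch distilled from the full MyPluginEncoder source further down; SketchEncoder is just an illustrative name.

package plugins

import "github.com/mozilla-services/heka/pipeline"

type SketchEncoder struct{}

// Init is called once when hekad loads the plugin's configuration section.
func (e *SketchEncoder) Init(config interface{}) error {
    return nil
}

// Encode turns a PipelinePack into the bytes the output plugin will emit.
func (e *SketchEncoder) Encode(pack *pipeline.PipelinePack) ([]byte, error) {
    return []byte(pack.Message.GetPayload() + "\n"), nil
}

func init() {
    // The name registered here is the one referenced from hekad.toml.
    pipeline.RegisterPlugin("SketchEncoder", func() interface{} {
        return new(SketchEncoder)
    })
}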


The plugin we write here is just a slight modification of RstEncoder: it adds one extra key: value pair:
re.writeAttr(buf, "tag", "pijing_myplugin")

The full code is as follows (myplugin_encoder.go):

/***** BEGIN LICENSE BLOCK *****
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# The Initial Developer of the Original Code is the Mozilla Foundation.
# Portions created by the Initial Developer are Copyright (C) 2014
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Rob Miller (rmiller@mozilla.com)
#
# ***** END LICENSE BLOCK *****/

package plugins

import (
    "bytes"
    "encoding/base64"
    "fmt"
    "github.com/mozilla-services/heka/message"
    "github.com/mozilla-services/heka/pipeline"
    "strconv"
    "strings"
    "time"
)

// MyPluginEncoder generates a restructured text rendering of a Heka message,
// useful for debugging.
type MyPluginEncoder struct {
    typeNames []string
}

func (re *MyPluginEncoder) Init(config interface{}) (err error) {
    re.typeNames = make([]string, len(message.Field_ValueType_name))
    for i, typeName := range message.Field_ValueType_name {
        re.typeNames[i] = strings.ToLower(typeName)
    }
    return
}

func (re *MyPluginEncoder) writeAttr(buf *bytes.Buffer, name, value string) {
    buf.WriteString(fmt.Sprintf(":%s: %s\n", name, value))
}

func (re *MyPluginEncoder) writeField(buf *bytes.Buffer, name, typeName, repr string,
    values []string) {

    isString := typeName == "string"
    buf.WriteString(fmt.Sprintf("    | name:\"%s\" type:%s value:", name, typeName))
    var value string
    if len(values) == 1 {
        value = values[0]
        if isString {
            value = fmt.Sprintf("\"%s\"", value)
        }
        buf.WriteString(value)
    } else {
        var i int
        buf.WriteString("[")
        for i, value = range values {
            if i > 0 {
                buf.WriteString(",")
            }
            if isString {
                value = fmt.Sprintf("\"%s\"", value)
            }
            buf.WriteString(value)
        }
        buf.WriteString("]")
    }
    if repr != "" {
        buf.WriteString(fmt.Sprintf(" representation:\"%s\"", repr))
    }
    buf.WriteString("\n")
}

func (re *MyPluginEncoder) Encode(pack *pipeline.PipelinePack) (output []byte, err error) {
    // Writing out the message attributes is easy.
    buf := new(bytes.Buffer)
    buf.WriteString("\n")
    timestamp := time.Unix(0, pack.Message.GetTimestamp()).UTC()
    re.writeAttr(buf, "Timestamp", timestamp.String())
    re.writeAttr(buf, "Type", pack.Message.GetType())
    re.writeAttr(buf, "Hostname", pack.Message.GetHostname())
    re.writeAttr(buf, "Pid", strconv.Itoa(int(pack.Message.GetPid())))
    re.writeAttr(buf, "Uuid", pack.Message.GetUuidString())
    re.writeAttr(buf, "Logger", pack.Message.GetLogger())
    re.writeAttr(buf, "Payload", pack.Message.GetPayload())
    re.writeAttr(buf, "EnvVersion", pack.Message.GetEnvVersion())
    re.writeAttr(buf, "Severity", strconv.Itoa(int(pack.Message.GetSeverity())))
    re.writeAttr(buf, "tag", "pijing_myplugin")

    // Writing out the dynamic message fields is a bit of a PITA.
    fields := pack.Message.GetFields()
    if len(fields) > 0 {
        buf.WriteString(":Fields:\n")
        for _, field := range fields {
            valueType := field.GetValueType()
            typeName := re.typeNames[valueType]
            var values []string
            switch valueType {
            case message.Field_STRING:
                values = field.GetValueString()
            case message.Field_BYTES:
                vBytes := field.GetValueBytes()
                values = make([]string, len(vBytes))
                for i, v := range vBytes {
                    values[i] = base64.StdEncoding.EncodeToString(v)
                }
            case message.Field_DOUBLE:
                vDoubles := field.GetValueDouble()
                values = make([]string, len(vDoubles))
                for i, v := range vDoubles {
                    values[i] = strconv.FormatFloat(v, 'g', -1, 64)
                }
            case message.Field_INTEGER:
                vInts := field.GetValueInteger()
                values = make([]string, len(vInts))
                for i, v := range vInts {
                    values[i] = strconv.FormatInt(v, 10)
                }
            case message.Field_BOOL:
                vBools := field.GetValueBool()
                values = make([]string, len(vBools))
                for i, v := range vBools {
                    values[i] = strconv.FormatBool(v)
                }
            }
            re.writeField(buf, field.GetName(), typeName, field.GetRepresentation(),
                values)
        }
    }
    buf.WriteString("\n")
    return buf.Bytes(), nil
}

func init() {
    pipeline.RegisterPlugin("MyPluginEncoder", func() interface{} {
        return new(MyPluginEncoder)
    })
}




Next, create an externals directory inside heka-dev, create a myplugin directory under it, and put myplugin_encoder.go in that directory.
Then create a plugin_loader.cmake file in the heka-dev/cmake directory with the following content:
add_external_plugin(git https://github.com/bellycard/myplugin :local)
The "myplugin" part of the URL is the directory that holds our custom plugin, and :local tells the build to look for the plugin under the externals directory first, downloading it from the URL only if it is not found there.
Finally, cd into the heka-dev root directory and run ./build.sh.
You can see during the build that our custom plugin has been compiled in.


Next, replace the hekad binary under heka/bin with the freshly built heka-dev/build/heka/bin/hekad, then modify hekad.toml along the following lines (note that the [MyPluginEncoder] section name matches the name registered with pipeline.RegisterPlugin in the plugin code):
[MyPluginEncoder]


[TestTcpInput]
type = "TcpInput"
address = "[ip]:55555"
decoder = ""
splitter = "NullSplitter"


[TestTcpOutput]
type = "KafkaOutput"
message_matcher = "Logger == 'TestTcpInput' && (Hostname=~/^[ip]:/)"
topic = "heka_test"
addrs = ["[ip]:9092","[ip]:9092"]
encoder = "MyPluginEncoder"


That is, the output uses MyPluginEncoder. Now push some data into port 55555 with a TCP client (a minimal client sketch follows below), and you can see that the Kafka heka_test topic receives the data.
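
A minimal Go client sketch for this step; the [ip] placeholder stands for the host where hekad's TcpInput is listening, matching the address in the config above.

package main

import (
    "log"
    "net"
)

func main() {
    // Connect to the TcpInput configured above (replace [ip] with the hekad host).
    conn, err := net.Dial("tcp", "[ip]:55555")
    if err != nil {
        log.Fatal(err)
    }
    defer conn.Close()

    // Any bytes work as a test payload; the NullSplitter passes them through unsplit.
    if _, err := conn.Write([]byte("hello heka")); err != nil {
        log.Fatal(err)
    }
}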


The data now carries the tag.