func (s *SandboxDecoder) Init(config interface{}) (err error) { s.sbc = config.(*SandboxConfig) s.sbc.ScriptFilename = pipeline.PrependShareDir(s.sbc.ScriptFilename) s.tz = time.UTC if tz, ok := s.sbc.Config["tz"]; ok { s.tz, err = time.LoadLocation(tz.(string)) if err != nil { return } } data_dir := pipeline.PrependBaseDir(DATA_DIR) if !fileExists(data_dir) { err = os.MkdirAll(data_dir, 0700) if err != nil { return } } switch s.sbc.ScriptType { case "lua": default: return fmt.Errorf("unsupported script type: %s", s.sbc.ScriptType) } s.sample = true return }
// Determines the script type and creates interpreter func (this *SandboxFilter) Init(config interface{}) (err error) { if this.sb != nil { return nil // no-op already initialized } this.sbc = config.(*SandboxConfig) this.sbc.ScriptFilename = pipeline.PrependShareDir(this.sbc.ScriptFilename) data_dir := pipeline.PrependBaseDir(DATA_DIR) if !fileExists(data_dir) { err = os.MkdirAll(data_dir, 0700) if err != nil { return } } switch this.sbc.ScriptType { case "lua": this.sb, err = lua.CreateLuaSandbox(this.sbc) if err != nil { return } default: return fmt.Errorf("unsupported script type: %s", this.sbc.ScriptType) } this.preservationFile = filepath.Join(data_dir, this.name+DATA_EXT) if this.sbc.PreserveData && fileExists(this.preservationFile) { err = this.sb.Init(this.preservationFile, "filter") } else { err = this.sb.Init("", "filter") } return }
func (s *SandboxEncoder) Init(config interface{}) (err error) { conf := config.(*SandboxEncoderConfig) s.sbc = &sandbox.SandboxConfig{ ScriptType: conf.ScriptType, ScriptFilename: conf.ScriptFilename, ModuleDirectory: conf.ModuleDirectory, PreserveData: conf.PreserveData, MemoryLimit: conf.MemoryLimit, InstructionLimit: conf.InstructionLimit, OutputLimit: conf.OutputLimit, Profile: conf.Profile, Config: conf.Config, } s.sbc.ScriptFilename = pipeline.PrependShareDir(s.sbc.ScriptFilename) s.sampleDenominator = pipeline.Globals().SampleDenominator s.tz = time.UTC if tz, ok := s.sbc.Config["tz"]; ok { if s.tz, err = time.LoadLocation(tz.(string)); err != nil { return } } dataDir := pipeline.PrependBaseDir(sandbox.DATA_DIR) if !fileExists(dataDir) { if err = os.MkdirAll(dataDir, 0700); err != nil { return } } switch s.sbc.ScriptType { case "lua": s.sb, err = lua.CreateLuaSandbox(s.sbc) default: return fmt.Errorf("Unsupported script type: %s", s.sbc.ScriptType) } if err != nil { return fmt.Errorf("Sandbox creation failed: '%s'", err) } s.preservationFile = filepath.Join(dataDir, s.name+sandbox.DATA_EXT) if s.sbc.PreserveData && fileExists(s.preservationFile) { err = s.sb.Init(s.preservationFile, "encoder") } else { err = s.sb.Init("", "encoder") } if err != nil { return fmt.Errorf("Sandbox initialization failed: %s", err) } s.sb.InjectMessage(func(payload, payload_type, payload_name string) int { s.injected = true s.output = []byte(payload) return 0 }) s.sample = true s.cEncoder = client.NewProtobufEncoder(nil) return }
// Creates the working directory to store the submitted scripts, // configurations, and data preservation files. func (this *SandboxManagerFilter) Init(config interface{}) (err error) { conf := config.(*SandboxManagerFilterConfig) this.maxFilters = conf.MaxFilters this.workingDirectory = pipeline.PrependBaseDir(conf.WorkingDirectory) this.moduleDirectory = pipeline.PrependShareDir(conf.ModuleDirectory) this.memoryLimit = conf.MemoryLimit this.instructionLimit = conf.InstructionLimit this.outputLimit = conf.OutputLimit err = os.MkdirAll(this.workingDirectory, 0700) return }
// Parses a Heka message and extracts the information necessary to start a new // SandboxFilter func (this *SandboxManagerFilter) loadSandbox(fr pipeline.FilterRunner, h pipeline.PluginHelper, dir string, msg *message.Message) (err error) { fv, _ := msg.GetFieldValue("config") if config, ok := fv.(string); ok { var configFile pipeline.ConfigFile if _, err = toml.Decode(config, &configFile); err != nil { return fmt.Errorf("loadSandbox failed: %s\n", err) } else { for name, conf := range configFile { name = getSandboxName(fr.Name(), name) if _, ok := h.Filter(name); ok { // todo support reload return fmt.Errorf("loadSandbox failed: %s is already running", name) } fr.LogMessage(fmt.Sprintf("Loading: %s", name)) confFile := filepath.Join(dir, fmt.Sprintf("%s.toml", name)) err = ioutil.WriteFile(confFile, []byte(config), 0600) if err != nil { return } var sbc SandboxConfig if err = toml.PrimitiveDecode(conf, &sbc); err != nil { return fmt.Errorf("loadSandbox failed: %s\n", err) } scriptFile := filepath.Join(dir, fmt.Sprintf("%s.%s", name, sbc.ScriptType)) err = ioutil.WriteFile(scriptFile, []byte(msg.GetPayload()), 0600) if err != nil { removeAll(dir, fmt.Sprintf("%s.*", name)) return } // check/clear the old state preservation file // this avoids issues with changes to the data model since the last load // and prevents holes in the graph from looking like anomalies os.Remove(filepath.Join(pipeline.PrependBaseDir(DATA_DIR), name+DATA_EXT)) var runner pipeline.FilterRunner runner, err = this.createRunner(dir, name, conf) if err != nil { removeAll(dir, fmt.Sprintf("%s.*", name)) return } err = h.PipelineConfig().AddFilterRunner(runner) if err == nil { this.currentFilters++ } break // only interested in the first item } } } return }
// SetDecoderRunner lazily creates and initializes the Lua sandbox on first
// call (a second call is a no-op), restoring preserved state when enabled,
// and registers the InjectMessage callback through which the sandbox script
// emits decoded messages. On any setup error the runner logs it and the
// whole pipeline is shut down.
func (s *SandboxDecoder) SetDecoderRunner(dr pipeline.DecoderRunner) {
	if s.sb != nil {
		return // no-op already initialized
	}
	s.dRunner = dr

	// Captured by the InjectMessage closure below; holds the header values
	// of the message currently being decoded so later injections for the
	// same message can inherit them.
	var original *message.Message

	switch s.sbc.ScriptType {
	case "lua":
		s.sb, s.err = lua.CreateLuaSandbox(s.sbc)
	default:
		s.err = fmt.Errorf("unsupported script type: %s", s.sbc.ScriptType)
	}

	if s.err == nil {
		s.preservationFile = filepath.Join(pipeline.PrependBaseDir(DATA_DIR), dr.Name()+DATA_EXT)
		// Restore preserved state when enabled and a state file exists.
		if s.sbc.PreserveData && fileExists(s.preservationFile) {
			s.err = s.sb.Init(s.preservationFile, "decoder")
		} else {
			s.err = s.sb.Init("", "decoder")
		}
	}
	if s.err != nil {
		// A broken decoder sandbox is treated as fatal for the process.
		dr.LogError(s.err)
		pipeline.Globals().ShutDown()
		return
	}

	// Called by the sandbox script for each injected message. Returns 0 on
	// success, 1 on failure (per the sandbox callback convention used by
	// the protobuf-unmarshal error path below).
	s.sb.InjectMessage(func(payload, payload_type, payload_name string) int {
		if s.pack == nil {
			s.pack = dr.NewPack()
			if original == nil && len(s.packs) > 0 {
				original = s.packs[0].Message // payload injections have the original header data in the first pack
			}
		} else {
			original = nil // processing a new message, clear the old message
		}
		if len(payload_type) == 0 { // heka protobuf message
			if original == nil {
				original = new(message.Message)
				copyMessageHeaders(original, s.pack.Message) // save off the header values since unmarshal will wipe them out
			}
			if nil != proto.Unmarshal([]byte(payload), s.pack.Message) {
				return 1
			}
			if s.tz != time.UTC {
				// The timestamp was parsed as if it were UTC; re-interpret
				// the same wall-clock time in the configured location.
				const layout = "2006-01-02T15:04:05.999999999" // remove the incorrect UTC tz info
				t := time.Unix(0, s.pack.Message.GetTimestamp())
				t = t.In(time.UTC)
				ct, _ := time.ParseInLocation(layout, t.Format(layout), s.tz)
				s.pack.Message.SetTimestamp(ct.UnixNano())
			}
		} else {
			// Non-protobuf injection: store the raw payload and tag it with
			// its type/name as message fields.
			s.pack.Message.SetPayload(payload)
			ptype, _ := message.NewField("payload_type", payload_type, "file-extension")
			s.pack.Message.AddField(ptype)
			pname, _ := message.NewField("payload_name", payload_name, "")
			s.pack.Message.AddField(pname)
		}
		if original != nil {
			// if future injections fail to set the standard headers, use the values
			// from the original message.
			if s.pack.Message.Uuid == nil {
				s.pack.Message.SetUuid(original.GetUuid())
			}
			if s.pack.Message.Timestamp == nil {
				s.pack.Message.SetTimestamp(original.GetTimestamp())
			}
			if s.pack.Message.Type == nil {
				s.pack.Message.SetType(original.GetType())
			}
			if s.pack.Message.Hostname == nil {
				s.pack.Message.SetHostname(original.GetHostname())
			}
			if s.pack.Message.Logger == nil {
				s.pack.Message.SetLogger(original.GetLogger())
			}
			if s.pack.Message.Severity == nil {
				s.pack.Message.SetSeverity(original.GetSeverity())
			}
			if s.pack.Message.Pid == nil {
				s.pack.Message.SetPid(original.GetPid())
			}
		}
		s.packs = append(s.packs, s.pack)
		s.pack = nil
		return 0
	})
}