// Create in the temp dir a Dockerfile proper to the exec type
func CreateDockerfile(tmp_dir_path string, new_exec_path string, exec_file_name string, category categorization) (imageName string) {
	imageName_arrays := strings.SplitN(exec_file_name, ".", -1)
	imageName = imageName_arrays[0]
	execPathDest := fmt.Sprintf("bin/%s", exec_file_name)
	entryPoint := fmt.Sprintf("bin/%s", exec_file_name)

	// cp the templates/Dockerfile into the tmp dir
	newDockerfilePath := tmp_dir_path + "/Dockerfile"

	// read new Dockerfile
	DockerfileString := T_GENERIC

	// replace the placeholders to build the actual Dockerfile
	replaceBaseImage := strings.NewReplacer("<BASE_IMAGE>", category.baseDockerImage)
	DockerfileStringReplaced := replaceBaseImage.Replace(DockerfileString)
	replaceExecPathSrc := strings.NewReplacer("<EXEC_PATH_SRC>", exec_file_name)
	DockerfileStringReplaced = replaceExecPathSrc.Replace(DockerfileStringReplaced)
	replaceExecPathDest := strings.NewReplacer("<EXEC_PATH_DEST>", execPathDest)
	DockerfileStringReplaced = replaceExecPathDest.Replace(DockerfileStringReplaced)
	replaceEntrypoint := strings.NewReplacer("<ENTRYPOINT>", entryPoint)
	DockerfileStringReplaced = replaceEntrypoint.Replace(DockerfileStringReplaced)

	// write in the Dockerfile the actual content with replaced values
	DockerfileBytesReplaced := []byte(DockerfileStringReplaced)
	err2 := ioutil.WriteFile(newDockerfilePath, DockerfileBytesReplaced, 0644)
	check(err2)
	return
}
// darwinRelink makes paths of linked libraries relative to executable.
//
// /usr/local/Cellar/qt5/5.3.0/lib/QtWidgets.framework/Versions/5/QtWidgets
// /usr/local/opt/qt5/lib/QtWidgets.framework/Versions/5/QtWidgets
// ->
// @executable_path/../Frameworks/QtWidgets.framework/Versions/5/QtWidgets
func darwinRelink(qlib, name string, strict bool) (err error) {
	file, err := macho.Open(name)
	if err != nil {
		return
	}
	defer file.Close()

	libs, err := file.ImportedLibraries()
	if err != nil {
		return
	}

	var qlib2 string
	// detect alternative qlib (homebrew symlinks Qt to /usr/local/opt)
	for _, lib := range libs {
		idx := strings.Index(lib, "QtCore")
		if idx > 0 {
			qlib2 = lib[:idx-1] // drop sep
			break
		}
	}

	replacer := strings.NewReplacer(qlib, relinkBase, qlib2, relinkBase)
	if len(qlib2) < 1 && strict {
		return fmt.Errorf("darwin relink: corrupt binary: %s", name)
	} else if !strict {
		replacer = strings.NewReplacer(qlib, relinkBase)
	}

	// replace qlib/qlib2 to relinkBase
	for _, lib := range libs {
		rlib := replacer.Replace(lib)
		cmd := exec.Command("install_name_tool", "-change", lib, rlib, name)
		if err = cmd.Run(); err != nil {
			return fmt.Errorf("darwin relink: %v", err)
		}
	}
	return
}
// Configure initializes this formatter with values from a plugin config.
func (format *CollectdToInflux10) Configure(conf core.PluginConfig) error {
	plugin, err := core.NewPluginWithType(conf.GetString("CollectdToInflux1009", "format.Forward"), conf)
	if err != nil {
		return err
	}

	format.base = plugin.(core.Formatter)
	format.tagString = strings.NewReplacer(",", "\\,", " ", "\\ ")
	format.stringString = strings.NewReplacer("\"", "\\\"")
	return nil
}
// Strips html markup, then limits to 280 characters. If the original text was
// longer than 280 chars, an ellipsis is appended.
func stripAndCrop(content string) string {
	content = processString(content,
		strings.NewReplacer("\n", " ").Replace,
		strings.NewReplacer("  ", " ").Replace,
		strings.TrimSpace,
		html.UnescapeString,
		html.UnescapeString,
		sanitize.HTML)

	if len(content) <= 280 {
		return content
	}

	return strings.TrimSpace(content[0:279]) + "…"
}
func isMatch(s1, s2 string) bool {
	// Return immediate failure for empty value.
	if s1 == "" {
		return false
	}

	// Return immediate success for wildcard.
	if s2 == "*" {
		return true
	}

	// Make case-insensitive.
	s1 = strings.ToLower(s1)
	s2 = strings.ToLower(s2)

	// Return immediate success for full match (case-insensitive).
	if s1 == s2 {
		return true
	}

	// Replace ":", ";", "," and "/" chars with whitespace.
	// Split s1 into whitespace separated fields.
	r := strings.NewReplacer(":", " ", ";", " ", ",", " ", "/", " ")
	fields := strings.Fields(r.Replace(s1))

	// Adjust any wildcards.
	if strings.ContainsAny(s2, "?*") {
		r := strings.NewReplacer("?", ".?", "*", ".*")
		s2 = r.Replace(s2)
	}

	// Default return value.
	retval := false

	// Create the regexp match checker (type *regexp.Regexp).
	checkMatch := regexp.MustCompile(s2)

	// Match the search term (s2) against each of s1's fields.
	for _, field := range fields {
		if checkMatch.MatchString(field) {
			retval = true
			break
		}
	}

	return retval
}
func init() {
	var commonInitialismsForReplacer []string
	for _, initialism := range commonInitialisms {
		commonInitialismsForReplacer = append(commonInitialismsForReplacer, initialism, strings.Title(strings.ToLower(initialism)))
	}
	commonInitialismsReplacer = strings.NewReplacer(commonInitialismsForReplacer...)
}
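// Hedged usage note for the init above: each initialism is paired with its
// title-cased form, so the replacer rewrites e.g. "HTTPServer" to "HttpServer",
// assuming "HTTP" appears in commonInitialisms (the list itself is not shown
// in this snippet).
//
//	commonInitialismsReplacer.Replace("HTTPServer") // -> "HttpServer" (assumption)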
func postsHandler(w http.ResponseWriter, r *http.Request, title string) {
	log.Print("Title is: " + title)
	log.Print(len(strings.Split(title, "/")))
	if len(strings.Split(title, "/")) > 2 {
		p, err := loadPage(title+".html", config.RootDesc, config.SiteName)
		if err != nil {
			log.Print(err)
			http.Redirect(w, r, "/", http.StatusFound)
			return
		}
		renderTemplate(w, "root", p)
	} else {
		r := strings.NewReplacer("_", " ")
		var data bytes.Buffer
		for i := len(posts) - 1; i >= 0; i-- {
			s := posts[i]
			row := fmt.Sprintf("<p><a href=\"/%s\">%s</a>\n\t\t\t\t<span class=\"blog-post-meta\">%s</span>\n\t\t\t\t</p>\n",
				strings.Split(s, ".html")[0],
				r.Replace(strings.Split(filepath.Base(s), ".html")[0]),
				dateFromPath(s))
			log.Print(s)
			data.Write([]byte(row))
		}
		p := &Page{SiteName: config.SiteName, PageTitle: "blargh", Body: template.HTML(data.String()), PostDate: "N/A", PostTitle: "plupp", Description: config.PostsDesc}
		renderTemplate(w, "posts", p)
	}
}
func parserPkg(pkgRealpath, pkgpath string) error {
	rep := strings.NewReplacer("/", "_", ".", "_")
	commentFilename = COMMENTFL + rep.Replace(pkgpath) + ".go"
	if !compareFile(pkgRealpath) {
		Info(pkgRealpath + " has not changed, not reloading")
		return nil
	}
	genInfoList = make(map[string][]ControllerComments)
	fileSet := token.NewFileSet()
	astPkgs, err := parser.ParseDir(fileSet, pkgRealpath, func(info os.FileInfo) bool {
		name := info.Name()
		return !info.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go")
	}, parser.ParseComments)
	if err != nil {
		return err
	}
	for _, pkg := range astPkgs {
		for _, fl := range pkg.Files {
			for _, d := range fl.Decls {
				switch specDecl := d.(type) {
				case *ast.FuncDecl:
					if specDecl.Recv != nil {
						parserComments(specDecl.Doc, specDecl.Name.String(), fmt.Sprint(specDecl.Recv.List[0].Type.(*ast.StarExpr).X), pkgpath)
					}
				}
			}
		}
	}
	genRouterCode()
	savetoFile(pkgRealpath)
	return nil
}
func (ttb *tosTCByte) escape() []canonTOSTCByteRecord {
	trs := make([]canonTOSTCByteRecord, len(ttb.Records))
	sr := strings.NewReplacer(
		"Capable", "",
		"(", "",
		")", "",
		"+", "",
		"-", "",
		"/", "",
		".", "",
		" ", "",
	)
	for i, tr := range ttb.Records {
		s := strings.TrimSpace(tr.Keyword)
		trs[i].OrigKeyword = s
		ss := strings.Split(s, " ")
		if len(ss) > 1 {
			trs[i].Keyword = strings.Join(ss[1:], " ")
		} else {
			trs[i].Keyword = ss[0]
		}
		trs[i].Keyword = sr.Replace(trs[i].Keyword)
		n, err := strconv.ParseUint(tr.Binary, 2, 8)
		if err != nil {
			continue
		}
		trs[i].Value = int(n)
	}
	return trs
}
func CMD(c *cli.Context) {
	checkCache(c)
	args := c.Args().First()
	hosts := []string{}
	for name, ip := range allNodes() {
		hosts = append(hosts, name, ip)
	}
	r := strings.NewReplacer(hosts...)
	// Use the replaced string directly; passing it through fmt.Sprintf would
	// mangle any literal '%' verbs it contains.
	argsWithIPs := r.Replace(args)
	parts := strings.Split(argsWithIPs, " ")
	cmd := exec.Command(parts[0], parts[1:]...)
	cmd.Stdout = stdout
	cmd.Stdin = os.Stdin
	err := cmd.Run()
	if err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}
// Prepares strings by splitting by caps, spaces, dashes, and underscore
func split(str string) (words []string) {
	repl := strings.NewReplacer(
		"@", "At ",
		"&", "And ",
		"|", "Pipe ",
		"$", "Dollar ",
		"!", "Bang ",
		"-", " ",
		"_", " ",
	)

	rex1 := regexp.MustCompile(`(\p{Lu})`)
	rex2 := regexp.MustCompile(`(\pL|\pM|\pN|\p{Pc})+`)

	str = trim(str)

	// Convert dash and underscore to spaces
	str = repl.Replace(str)

	// Split when uppercase is found (needed for Snake)
	str = rex1.ReplaceAllString(str, " $1")

	// check if consecutive single char things make up an initialism
	for _, k := range initialisms {
		str = strings.Replace(str, rex1.ReplaceAllString(k, " $1"), " "+k, -1)
	}

	// Get the final list of words
	words = rex2.FindAllString(str, -1)

	return
}
func (b ServiceBroker) ToJSON() string {
	bytes, err := ioutil.ReadFile(assets.NewAssets().ServiceBroker + "/cats.json")
	Expect(err).To(BeNil())
	replacer := strings.NewReplacer(
		"<fake-service>", b.Service.Name,
		"<fake-service-guid>", b.Service.ID,
		"<dashboard-client-key>", b.Service.DashboardClient.Key,
		"<sso-test>", b.Service.DashboardClient.ID,
		"<sso-secret>", b.Service.DashboardClient.Secret,
		"<sso-redirect-uri>", b.Service.DashboardClient.RedirectUri,
		"<fake-plan>", b.SyncPlans[0].Name,
		"<fake-plan-guid>", b.SyncPlans[0].ID,
		"<fake-plan-2>", b.SyncPlans[1].Name,
		"<fake-plan-2-guid>", b.SyncPlans[1].ID,
		"<fake-async-plan>", b.AsyncPlans[0].Name,
		"<fake-async-plan-guid>", b.AsyncPlans[0].ID,
		"<fake-async-plan-2>", b.AsyncPlans[1].Name,
		"<fake-async-plan-2-guid>", b.AsyncPlans[1].ID,
		"<fake-sso-plan>", b.SsoPlans[0].Name,
		"<fake-sso-plan-guid>", b.SsoPlans[0].ID,
		"<sso-plan-client-id>", b.SsoPlans[0].DashboardClient.ID,
		"<sso-plan-secret>", b.SsoPlans[0].DashboardClient.Secret,
	)

	return replacer.Replace(string(bytes))
}
func TestRead_TildePath(t *testing.T) {
	isPath := true
	home, err := homedir.Dir()
	if err != nil {
		t.Fatalf("err: %s", err)
	}

	f, cleanup := testTempFile(t, home)
	defer cleanup()

	if _, err := io.WriteString(f, "foobar"); err != nil {
		t.Fatalf("err: %s", err)
	}
	f.Close()

	r := strings.NewReplacer(home, "~")
	homePath := r.Replace(f.Name())

	contents, wasPath, err := Read(homePath)
	if err != nil {
		t.Fatalf("err: %s", err)
	}
	if wasPath != isPath {
		t.Fatalf("expected wasPath: %t, got %t", isPath, wasPath)
	}
	if contents != "foobar" {
		t.Fatalf("expected contents %s, got %s", "foobar", contents)
	}
}
func markdownEscape(s string) string {
	return strings.NewReplacer(
		"_", "\\_",
		"*", "\\*",
		"[", "\\[",
	).Replace(s)
}
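// A minimal usage sketch for markdownEscape above (the input string is
// hypothetical): only "_", "*" and "[" are escaped, so closing brackets and
// parentheses pass through unchanged.
//
//	markdownEscape("a_b *c* [link](x)") // -> `a\_b \*c\* \[link](x)`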
// logMagFormat formats a log message.
func logMagFormat(format string, msg *LogMsg) string {
	if nil == msg {
		return ""
	}
	if 0 == len(format) {
		format = "${yyyy}-${MM}-${dd} ${hh}:${mm}:${ss}${SSSSSS} [${TARGET}] ([${LOG_GROUP}][${LOG_TAG}][L${FILE_LINE} ${FUNC_NAME}])\n${MSG}"
	}

	// Format the timestamp.
	format = SFTimeUtil.YMDHMSSSignFormat(msg.dateTime, format)

	// Formatting pairs for the log message; do not reorder them casually,
	// the formatting relies on this order by design.
	logFormat := []string{
		"${LOG_GROUP}", msg.logGroup,
		"${LOG_TAG}", msg.logTag,
		"${FILE_LINE}", strconv.Itoa(msg.fileLine),
		"${FILE_PATH}", msg.filePath,
		"${FUNC_NAME}", msg.funcName,
		"${STACK}", msg.stack,
		"${TARGET}", string(msg.target),
		"${MSG}", msg.msg,
	}

	replacer := strings.NewReplacer(logFormat...)
	return replacer.Replace(format)
}
func parseDockerfileTemplate(template string) string {
	r := strings.NewReplacer(
		"$(ARCH)", getArch(),
		"$(PROJECT_VERSION)", metadata.Version)
	return r.Replace(template)
}
func usageMessage() string {
	template := strings.TrimLeft(usageMessageRaw, "\n")
	replacements := []string{
		"$models", modelsReadable(),
	}
	return strings.NewReplacer(replacements...).Replace(template)
}
// ------------------------------------------------------------------
// - TRANSLATION ----------------------------------------------------
// ------------------------------------------------------------------
func newTranslation(pattern string, replacement string) (instruction, error) {
	rc1 := utf8.RuneCountInString(pattern)
	rc2 := utf8.RuneCountInString(replacement)
	if rc1 != rc2 {
		return nil, fmt.Errorf("Translation 'y' pattern and replacement must be equal length")
	}

	// fill out repls array with alternating patterns and their replacements
	var repls = make([]string, rc1+rc2)
	idx := 0
	for _, ch := range pattern {
		repls[idx] = string(ch)
		idx += 2
	}
	idx = 1
	for _, ch := range replacement {
		repls[idx] = string(ch)
		idx += 2
	}
	stringReplacer := strings.NewReplacer(repls...)

	// now return a custom-made instruction for this translation:
	return func(svm *vm) error {
		svm.pat = stringReplacer.Replace(svm.pat)
		svm.ip++
		return nil
	}, nil
}
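// Illustrative note, not from the original source: for pattern "abc" and
// replacement "xyz" the loops above interleave the two, building repls as
// []string{"a", "x", "b", "y", "c", "z"}, so the Replacer maps a->x, b->y and
// c->z, mirroring sed's 'y' command.
//
//	strings.NewReplacer("a", "x", "b", "y", "c", "z").Replace("cab") // -> "zxy"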
func (g *Generator) generatePerModelFiles(templateFileNames []string, modelTpls *template.Template, modelsDir string, filter func(modelInfo *modelInfo) bool) error {
	for _, tplFileName := range templateFileNames {
		tplName := filepath.Base(tplFileName)
		// Apply the templates to each model in the API
		for _, modelInfo := range g.modelsInfo {
			if filter(modelInfo) {
				continue
			}
			// TODO: Do this concurrently
			repl := strings.NewReplacer(
				templateExt, "",
				fileNameModelNameInterpolation, modelInfo.Name,
				fileNameAPINameInterpolation, g.config.APIName,
				fileNameAPIPrefixInterpolation, g.config.APIPrefix,
			)
			fileName := repl.Replace(tplName)
			err := generateFile(path.Join(modelsDir, fileName), modelTpls.Lookup(tplName), templateData{
				Config:           g.config,
				API:              g.api,
				CurrentModelInfo: modelInfo,
				AllModelsInfo:    g.modelsInfo,
				AuthInfo:         g.authInfo,
				CurrentTime:      time.Now(),
			})
			if err != nil {
				return errors.Annotatef(err, "when generating model or service %q", modelInfo.Name)
			}
		}
	}
	return nil
}
// Return s decorated with quote characters so it can safely be
// included in a shell command.
func ShellQuote(s string) string {
	if len(s) > 0 && !strings.ContainsAny(s, shellmeta) {
		return s // fast path for common case
	}
	double := strings.Contains(s, "\"")
	single := strings.Contains(s, "'")
	if double && single {
		if shellreplacer == nil {
			pairs := make([]string, len(shellmeta)*2)
			for i := 0; i < len(shellmeta); i++ {
				pairs[i*2] = string(shellmeta[i])
				pairs[i*2+1] = "\\" + string(shellmeta[i])
			}
			shellreplacer = strings.NewReplacer(pairs...)
		}
		return shellreplacer.Replace(s)
	} else if single {
		// use double quotes, but be careful of $
		return "\"" + strings.Replace(s, "$", "\\$", -1) + "\""
	} else {
		// use single quotes
		return "'" + s + "'"
	}
	panic("unreachable code")
}
func (ri *routeInfo) reverse(v ...interface{}) string {
	route := ri.route
	switch vlen, nlen := len(v), len(ri.paramNames); {
	case vlen < nlen:
		panic(fmt.Errorf("too few arguments: %v (controller is %v)", route.Name, reflect.TypeOf(route.Controller).Name()))
	case vlen > nlen:
		panic(fmt.Errorf("too many arguments: %v (controller is %v)", route.Name, reflect.TypeOf(route.Controller).Name()))
	case vlen+nlen == 0:
		return route.Path
	}
	var arg MethodArgs
	for _, arg = range route.MethodTypes {
		break
	}
	for i := 0; i < len(v); i++ {
		t := arg[ri.paramNames[i]]
		validateParser := typeValidateParsers[t]
		if !validateParser.Validate(v[i]) {
			panic(fmt.Errorf("parameter type mismatch: %v (controller is %v)", route.Name, reflect.TypeOf(route.Controller).Name()))
		}
	}
	var oldnew []string
	for i := 0; i < len(v); i++ {
		oldnew = append(oldnew, ri.rawParamNames[i], fmt.Sprint(v[i]))
	}
	replacer := strings.NewReplacer(oldnew...)
	path := replacer.Replace(route.Path)
	return normPath(path)
}
// Initialize the seek journal file for keeping track of our place in a log
// file.
func (fm *FileMonitor) setupJournalling(journalName string) (err error) {
	// Check that the `seekjournals` directory exists, try to create it if
	// not.
	journalDir := PrependBaseDir("seekjournals")
	var dirInfo os.FileInfo
	if dirInfo, err = os.Stat(journalDir); err != nil {
		if os.IsNotExist(err) {
			if err = os.MkdirAll(journalDir, 0700); err != nil {
				fm.LogMessage(fmt.Sprintf("Error creating seek journal folder %s: %s",
					journalDir, err))
				return
			}
		} else {
			fm.LogMessage(fmt.Sprintf("Error accessing seek journal folder %s: %s",
				journalDir, err))
			return
		}
	} else if !dirInfo.IsDir() {
		return fmt.Errorf("%s doesn't appear to be a directory", journalDir)
	}

	// Generate the full file path and save it on the FileMonitor struct.
	r := strings.NewReplacer(string(os.PathSeparator), "_", ".", "_")
	journalName = r.Replace(journalName)
	fm.seekJournalPath = filepath.Join(journalDir, journalName)

	return fm.recoverSeekPosition()
}
func init() {
	// Handle permitted constructions like "100L200,230"
	pathCmdSub = strings.NewReplacer(",", " ",
		"L", " L ", "l", " l ",
		"C", " C ", "c", " c ",
		"M", " M ", "m", " m ")
}
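// Hypothetical illustration of pathCmdSub above: padding the path commands
// with spaces (and turning commas into spaces) lets a compact SVG path string
// be split on whitespace afterwards.
//
//	strings.Fields(pathCmdSub.Replace("100L200,230")) // -> ["100" "L" "200" "230"]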
func (db DBU) StreamJSON(w io.Writer, query string, args ...interface{}) error {
	fn := func(columns []string, count int, buffer []interface{}) {
		if count > 0 {
			fmt.Fprintln(w, ",")
		}
		fmt.Fprintln(w, " {")
		repl := strings.NewReplacer("\n", "\\\\n", "\t", "\\\\t", "\r", "\\\\r", `"`, `\"`)
		for i, s := range toString(buffer) {
			comma := ",\n"
			if i >= len(buffer)-1 {
				comma = "\n"
			}
			if isNumber(s) {
				fmt.Fprintf(w, ` "%s": %s%s`, columns[i], s, comma)
			} else {
				s = repl.Replace(s)
				fmt.Fprintf(w, ` "%s": "%s"%s`, columns[i], s, comma)
			}
		}
		fmt.Fprint(w, " }")
	}
	fmt.Fprintln(w, "[")
	defer fmt.Fprintln(w, "\n]")
	return db.Stream(fn, query, args...)
}
// getURL builds the request URL.
func (this *URL) getURL(params map[string]string) string {
	// Start from a zero-length slice; make([]string, 10) would prepend ten
	// empty strings and corrupt the replacer's old/new pairs.
	kv := make([]string, 0, len(params)*2)
	for k, v := range params {
		kv = append(kv, k, v)
	}
	return strings.NewReplacer(kv...).Replace(this.value)
}
// Format formats Entry
func (f *TextFormatter) Format(entry *Entry) []byte {
	result := f.format

	additionalBuf := &bytes.Buffer{}
	data := filterEntryFields(entry)
	if marshaledData, err := json.Marshal(data); err == nil {
		additionalBuf.Write(marshaledData)
	}

	replaces := make([]string, 0, 2+len(entry.Fields))
	replaces = append(
		replaces,
		":level:", entry.Level.String(),
		":time:", entry.Time.UTC().Format("2006-01-02T15:04:05.000000-07:00"),
		":message:", entry.Message,
		":additional:", additionalBuf.String(),
	)
	for key, value := range entry.Fields {
		replaces = append(replaces, fmt.Sprintf(":%s:", key), fmt.Sprintf("%s", value))
	}

	replacer := strings.NewReplacer(replaces...)

	buf := &bytes.Buffer{}
	replacer.WriteString(buf, result)
	buf.WriteByte('\n')
	return buf.Bytes()
}
// Get query data as string.
//
// @return string
func (this *Query) ToString() string {
	if this.DataString != "" {
		return this.DataString
	}

	for key, value := range this.Data {
		if util.TypeReal(value) == "[]string" {
			value = _fmt.Sprintf("[\"%s\"]", _str.Join(value.([]string), "\",\""))
		}
		this.DataString += _fmt.Sprintf(
			"%s=%s&", util.UrlEncode(key), util.UrlEncode(util.String(value)))
	}

	if this.DataString != "" {
		// drop last "&"
		this.DataString = this.DataString[0 : len(this.DataString)-1]

		// purify some encoded stuff
		this.DataString = _str.NewReplacer(
			"%5B", "[",
			"%5D", "]",
			"%2C", ",",
		).Replace(this.DataString)
	}

	return this.DataString
}
func downloadArtifactsForSuite(dirPrefix string, allArtifacts map[string][]devicefarm.ListArtifactsOutput, suite devicefarm.Suite) {
	suiteArn := *suite.Arn
	artifactTypes := []string{"LOG", "SCREENSHOT", "FILE"}

	r := strings.NewReplacer(":suite:", ":artifact:")
	artifactPrefix := r.Replace(suiteArn)

	for _, artifactType := range artifactTypes {
		typedArtifacts := allArtifacts[artifactType]
		for _, artifactList := range typedArtifacts {
			count := 0
			for _, artifact := range artifactList.Artifacts {
				if strings.HasPrefix(*artifact.Arn, artifactPrefix) {
					//pathFull := strings.Split(suiteArn, ":")[6]
					//pathSuffix := strings.Split(pathFull, "/")
					//runId := pathSuffix[0]
					//jobId := pathSuffix[1]
					//suiteId := pathSuffix[2]
					//artifactId := pathSuffix[3]

					fileName := fmt.Sprintf("%s/%d_%s.%s", dirPrefix, count, *artifact.Name, *artifact.Extension)
					//fileName := fmt.Sprintf("%s/%s/%s/%s.%s", dirPrefix, suiteId, artifactId, *artifact.Name, *artifact.Extension)

					fmt.Printf("- [%s] %s\n", artifactType, fileName)
					//fmt.Printf("- [%s] %s.%s\n", artifactType, *artifact.Name, *artifact.Extension)

					downloadArtifact(fileName, artifact)
					count++
				}
			}
		}
	}
}
func (w *ResponseWriter) WriteHeader(status int) {
	proto := "HTTP/1.0"
	if w.r.ProtoAtLeast(1, 1) {
		proto = "HTTP/1.1"
	}
	codestring := http.StatusText(status)
	w.headers_chunk += proto + " " + strconv.Itoa(status) + " " + codestring + "\r\n"
	C.uwsgi_simple_set_status(w.wsgi_req, C.int(status))

	if w.headers.Get("Content-Type") == "" {
		w.headers.Set("Content-Type", "text/html; charset=utf-8")
	}

	for k := range w.headers {
		for _, v := range w.headers[k] {
			v = strings.NewReplacer("\n", " ", "\r", " ").Replace(v)
			v = strings.TrimSpace(v)
			w.headers_chunk += k + ": " + v + "\r\n"
			C.uwsgi_simple_inc_headers(w.wsgi_req)
		}
	}
	w.headers_chunk += "\r\n"

	c_h_chunk := C.CString(w.headers_chunk)
	defer C.free(unsafe.Pointer(c_h_chunk))
	C.uwsgi_simple_response_write_header(w.wsgi_req, c_h_chunk, C.size_t(len(w.headers_chunk)))
	w.wroteHeader = true
}
// Start is the entry point for chaincode bootstrap.
func Start(cc Chaincode) error {
	viper.SetEnvPrefix("OPENCHAIN")
	viper.AutomaticEnv()
	replacer := strings.NewReplacer(".", "_")
	viper.SetEnvKeyReplacer(replacer)

	flag.StringVar(&peerAddress, "peer.address", "", "peer address")
	flag.Parse()

	chaincodeLogger.Debug("Peer address: %s", getPeerAddress())

	// Establish connection with validating peer
	clientConn, err := newPeerClientConnection()
	if err != nil {
		chaincodeLogger.Error(fmt.Sprintf("Error trying to connect to local peer: %s", err))
		return fmt.Errorf("Error trying to connect to local peer: %s", err)
	}

	chaincodeLogger.Debug("os.Args returns: %s", os.Args)

	chaincodeSupportClient := pb.NewChaincodeSupportClient(clientConn)

	err = chatWithPeer(chaincodeSupportClient, cc)

	return err
}
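// Hedged sketch of what the env key replacer above achieves, assuming standard
// viper behaviour: with the "OPENCHAIN" prefix and "." mapped to "_", a dotted
// config key is resolved from the matching underscored environment variable.
//
//	// With OPENCHAIN_PEER_ADDRESS=0.0.0.0:30303 in the environment:
//	viper.GetString("peer.address") // -> "0.0.0.0:30303"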