// collectShaders recursively walks srcDirPath and gathers shader sources by file
// extension: .glsl files go into incShaders (include snippets keyed by file name),
// while .glvs/.gltc/.glte/.glgs/.glfs/.glcs files are appended to the corresponding
// per-stage slices in allShaders. If stripComments is true, /* ... */ block comments
// are removed from every collected source.
func collectShaders(srcDirPath string, allShaders *shaderSrcSortables, incShaders map[string]string, stripComments bool) {
	var (
		fileInfos                                                   []os.FileInfo
		fileName, shaderSource                                      string
		isInc, isVert, isTessCtl, isTessEval, isGeo, isFrag, isComp bool
		pos1, pos2                                                  int
	)
	if src, err := os.Open(srcDirPath); err == nil {
		fileInfos, err = src.Readdir(0)
		src.Close()
		if err == nil {
			for _, fileInfo := range fileInfos {
				fileName = fileInfo.Name()
				if fileInfo.IsDir() {
					collectShaders(filepath.Join(srcDirPath, fileName), allShaders, incShaders, stripComments)
				} else {
					isInc, isVert, isTessCtl, isTessEval, isGeo, isFrag, isComp = strings.HasSuffix(fileName, ".glsl"), strings.HasSuffix(fileName, ".glvs"), strings.HasSuffix(fileName, ".gltc"), strings.HasSuffix(fileName, ".glte"), strings.HasSuffix(fileName, ".glgs"), strings.HasSuffix(fileName, ".glfs"), strings.HasSuffix(fileName, ".glcs")
					if isInc || isVert || isTessCtl || isTessEval || isGeo || isFrag || isComp {
						if shaderSource = ufs.ReadTextFile(filepath.Join(srcDirPath, fileName), false, ""); len(shaderSource) > 0 {
							if stripComments {
								// Remove /* ... */ block comments, one occurrence at a time.
								for {
									if pos1, pos2 = strings.Index(shaderSource, "/*"), strings.Index(shaderSource, "*/"); (pos1 < 0) || (pos2 < pos1) {
										break
									}
									shaderSource = shaderSource[0:pos1] + shaderSource[pos2+2:]
								}
							}
							if isInc {
								incShaders[fileName] = shaderSource
							}
							if isVert {
								allShaders.vert = append(allShaders.vert, shaderSrc{fileName, shaderSource})
							}
							if isTessCtl {
								allShaders.tessCtl = append(allShaders.tessCtl, shaderSrc{fileName, shaderSource})
							}
							if isTessEval {
								allShaders.tessEval = append(allShaders.tessEval, shaderSrc{fileName, shaderSource})
							}
							if isGeo {
								allShaders.geo = append(allShaders.geo, shaderSrc{fileName, shaderSource})
							}
							if isFrag {
								allShaders.frag = append(allShaders.frag, shaderSrc{fileName, shaderSource})
							}
							if isComp {
								allShaders.comp = append(allShaders.comp, shaderSrc{fileName, shaderSource})
							}
						}
					}
				}
			}
		}
	}
}
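// Usage sketch (an assumption, not part of the original sources): a caller might
// gather every shader under a hypothetical "assets/shaders" root like this, then
// hand incShaders to whatever performs include-style preprocessing. Only
// collectShaders, shaderSrcSortables and the file-suffix conventions above are
// taken from the code in this file; the root path and function name are illustrative.
func exampleCollectShaders() (shaderSrcSortables, map[string]string) {
	var allShaders shaderSrcSortables
	incShaders := map[string]string{}
	// true = strip /* ... */ block comments from every collected source.
	collectShaders("assets/shaders", &allShaders, incShaders, true)
	return allShaders, incShaders
}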
// Returns a BlogNav for the specified path.
// For example, GetBlogArchive("blog") maps to "contents/blog/"
func (me *PageContext) GetBlogArchive(path string) *BlogNav {
	if _, ok := SiteData.Blogs[path]; !ok {
		dirPath := dir("contents", path)
		handler := func(_ string) {
			items := BlogNavItems{}
			ufs.WalkAllFiles(dirPath, func(fullPath string) bool {
				if filepath.Dir(fullPath) != dirPath {
					vpath := fullPath[:len(fullPath)-len(filepath.Ext(fullPath))]
					vpath = vpath[len(dirPath):]
					navItem := BlogNavItem{}
					navItem.Href, navItem.Caption = filepath.ToSlash(vpath), filepath.Base(vpath)
					if src := ufs.ReadTextFile(fullPath, false, ""); len(src) > 0 {
						if pos1, pos2 := strings.Index(src, "<h2>"), strings.Index(src, "</h2>"); pos1 >= 0 && pos2 > pos1 {
							src = src[:pos2]
							navItem.Caption = src[pos1+4:]
						}
					}
					// Href has the form "/<year>/<month>/<day>", so index 0 is the empty
					// segment before the leading slash.
					if pathItems := strings.Split(navItem.Href, "/"); len(pathItems) > 1 {
						if navItem.Year = pathItems[1]; len(pathItems) > 2 {
							if navItem.Month = pathItems[2]; len(pathItems) > 3 {
								navItem.Day = pathItems[3]
							}
						}
					}
					items = append(items, navItem)
				}
				return true
			})
			sort.Sort(items)
			SiteData.Blogs[path] = BlogNav{Nav: items}
		}
		ufs.WalkAllDirs(dirPath, func(fullPath string) bool {
			DirWatch.WatchIn(fullPath, "*", false, handler)
			return true
		})
		handler("")
	}
	nav := SiteData.Blogs[path]
	return &nav
}
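// Usage sketch (an assumption): listing the archive from a handler or template
// helper. Only PageContext.GetBlogArchive and the BlogNav / BlogNavItem fields
// used below come from the code in this file; the function name and the
// formatting of the returned strings are illustrative.
func exampleBlogArchive(me *PageContext) []string {
	lines := []string{}
	nav := me.GetBlogArchive("blog") // reads and caches "contents/blog/"
	for _, item := range nav.Nav {
		// Href has the form "/<year>/<month>/<day>"; Caption is the post's <h2>
		// title when one was found, otherwise the file's base name.
		lines = append(lines, item.Year+"-"+item.Month+"-"+item.Day+": "+item.Caption)
	}
	return lines
}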