func ParseCPANLines(lines []string) (*CPANFile, error) {
	cpanfile := &CPANFile{}
	for _, l := range lines {
		if len(l) == 0 {
			continue
		}
		log.Trace("Parsing line: %s", l)
		dep, err := ParseCPANLine(l)
		if err != nil {
			log.Error("=> Error parsing line: %s", err)
			continue
		}
		if dep != nil {
			log.Info("=> Found dependency: %s", dep)
			cpanfile.AddDependency(dep)
			continue
		}
		log.Trace("=> No error and no dependency found")
	}
	log.Info("Found %d dependencies in cpanfile", len(cpanfile.Dependencies))
	return cpanfile, nil
}
// Listen binds to httpBindAddr
func Listen(httpBindAddr string, Asset func(string) ([]byte, error), exitCh chan int, registerCallback func(http.Handler)) {
	log.Info("[HTTP] Binding to address: %s", httpBindAddr)

	pat := pat.New()
	registerCallback(pat)

	f := func(w http.ResponseWriter, req *http.Request) {
		if Authorised == nil {
			pat.ServeHTTP(w, req)
			return
		}
		u, pw, ok := req.BasicAuth()
		if !ok || !Authorised(u, pw) {
			w.Header().Set("WWW-Authenticate", "Basic")
			w.WriteHeader(401)
			return
		}
		pat.ServeHTTP(w, req)
	}

	err := http.ListenAndServe(httpBindAddr, http.HandlerFunc(f))
	if err != nil {
		log.Fatalf("[HTTP] Error binding to address %s: %s", httpBindAddr, err)
	}
}
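// A minimal standalone sketch of the same wrap-a-router-with-basic-auth pattern used
// by Listen above, written against only net/http. The real code routes through a pat
// router and a package-level Authorised callback; the names below (withBasicAuth,
// the demo credentials) are illustrative assumptions, not part of the original code.
package main

import "net/http"

func withBasicAuth(next http.Handler, authorised func(user, pass string) bool) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
		if authorised == nil {
			// No auth callback configured; pass the request straight through
			next.ServeHTTP(w, req)
			return
		}
		u, pw, ok := req.BasicAuth()
		if !ok || !authorised(u, pw) {
			w.Header().Set("WWW-Authenticate", `Basic realm="restricted"`)
			w.WriteHeader(http.StatusUnauthorized)
			return
		}
		next.ServeHTTP(w, req)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
		w.Write([]byte("ok"))
	})
	check := func(user, pass string) bool { return user == "admin" && pass == "secret" }
	http.ListenAndServe(":8080", withBasicAuth(mux, check))
}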
func (s *Source) loadBackPANSource() error {
	log.Info("Loading BackPAN index: backpan-index")

	file, err := os.Open("backpan-index")
	if err != nil {
		log.Warn(err.Error())
		return nil
	}

	index, err := ioutil.ReadAll(file)
	file.Close()
	if err != nil {
		log.Fatal(err)
	}

	for _, p := range strings.Split(string(index), "\n") {
		if !strings.HasPrefix(p, "authors/id/") {
			continue
		}
		//log.Printf("Parsing: %s\n", p)
		m := s.ModuleFromBackPANIndex(p)
		if m != nil {
			s.ModuleList[m.Name+"-"+m.Version] = m
		}
	}

	log.Printf("Found %d packages for source: %s", len(s.ModuleList), s)
	return nil
}
func (ws *Workspace) ExecFunction(task *Task, name string, args ...string) string {
	log.Info("Executing function %s: %s", name, args)

	// Look up the function in this workspace, falling back to the global workspace
	var fn *Function
	if f, ok := ws.Functions[name]; ok {
		fn = f
	} else if f, ok := GlobalWorkspace.Functions[name]; ok {
		fn = f
	} else {
		log.Warn("Function not found: %s", name)
		return ""
	}

	// Map positional args onto the function's named arguments; guard against
	// fewer supplied args than the function declares to avoid an index panic
	argmap := make(map[string]string)
	for i, arg := range fn.Args {
		if i >= len(args) {
			log.Warn("Missing value for argument %s", arg)
			break
		}
		argmap[arg] = args[i]
	}

	// Substitute $metadata values from the task into each argument value
	for k, v := range argmap {
		log.Info("argmap: %s => %s", k, v)
		for t, m := range task.Metadata {
			log.Info("meta: %s => %s", t, m)
			v = strings.Replace(v, "$"+t, m, -1)
		}
		argmap[k] = v
	}

	// Substitute the argument values into the command string
	c := fn.Command
	for k, v := range argmap {
		log.Info("ARG: %s => %s", k, v)
		c = strings.Replace(c, k, v, -1)
	}

	var funcEnvironment map[string]string
	if ws.InheritEnvironment {
		funcEnvironment = ws.Environment
	} else if GlobalWorkspace.InheritEnvironment {
		funcEnvironment = GlobalWorkspace.Environment
	} else {
		funcEnvironment = make(map[string]string)
	}

	tsk := NewTask(nil, "Function$"+name, fn.Executor, c, funcEnvironment, false, "", "", make(map[string]string), "")
	ch := tsk.Start()
	<-ch

	return tsk.TaskRuns[0].StdoutBuf.String()
}
func (s *Session) loadSessionData() {
	if sid_cookie, ok := s.Request.Cookies["__SID"]; ok {
		s.SessionID = sid_cookie.Value
		log.Info("Retrieved session ID (__SID): %s", s.SessionID)
	}
	s.readSessionData()
}
func (deps *DependencyList) PrintDeps(d int) {
	for _, dep := range deps.Dependencies {
		if dep.Module == nil {
			// Indent to the current depth, matching Module.PrintDeps
			log.Info(MkIndent(d)+"%s not found", dep.Name)
			continue
		}
		dep.Module.PrintDeps(d + 1)
	}
}
func (s *Source) loadCPANSource() error {
	log.Info("Loading CPAN index: %s", s.Index)

	res, err := http.Get(s.Index)
	if err != nil {
		log.Warn("Error loading index: %s", err)
		return nil
	}

	// TODO optional gzip
	r, err := gzip.NewReader(res.Body)
	if err != nil {
		log.Warn(err.Error())
		b, _ := ioutil.ReadAll(res.Body)
		log.Info("%s", string(b))
		return nil
	}

	packages, err := ioutil.ReadAll(r)
	res.Body.Close()
	if err != nil {
		log.Warn(err)
		return nil
	}

	// Skip the header block; package rows start after the first blank line
	foundnl := false
	for _, p := range strings.Split(string(packages), "\n") {
		if !foundnl && len(p) == 0 {
			foundnl = true
			continue
		}
		if !foundnl || len(p) == 0 {
			continue
		}
		m := s.ModuleFromCPANIndex(p)
		s.ModuleList[m.Name] = m
	}

	log.Info("Found %d packages for source: %s", len(s.ModuleList), s)
	return nil
}
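// A minimal sketch of the row format ModuleFromCPANIndex is expected to receive from
// 02packages.details.txt: the file begins with a header block, then a blank line, then
// one whitespace-separated "Module::Name  version  author/path" row per package, which
// is what the foundnl handling above skips past. The sample row and field layout here
// are assumptions for illustration, not taken from the original code.
package main

import (
	"fmt"
	"strings"
)

func main() {
	line := "Moose                          2.2015  E/ET/ETHER/Moose-2.2015.tar.gz"
	fields := strings.Fields(line)
	name, version, path := fields[0], fields[1], fields[2]
	fmt.Printf("module=%s version=%s path=%s\n", name, version, path)
}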
func (r *Response) createSessionId() {
	bytes := make([]byte, 256)
	rand.Read(bytes)

	s, _ := bcrypt.GenerateFromPassword(bytes, 11)
	r.session.SessionID = string(s)
	log.Info("Generated session ID (__SID): %s", r.session.SessionID)

	r.Cookies.Set(&nethttp.Cookie{
		Name:  "__SID",
		Value: r.session.SessionID,
		Path:  "/",
	})
}
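// An alternative minimal sketch for generating a random, URL-safe session ID using only
// the standard library. This is not the original implementation; it is shown because
// bcrypt only considers the first 72 bytes of its input, so most of the 256 random bytes
// above do not contribute entropy to the resulting ID.
package main

import (
	"crypto/rand"
	"encoding/base64"
	"fmt"
)

func newSessionID() (string, error) {
	b := make([]byte, 32) // 256 bits of entropy
	if _, err := rand.Read(b); err != nil {
		return "", err
	}
	return base64.RawURLEncoding.EncodeToString(b), nil
}

func main() {
	id, err := newSessionID()
	if err != nil {
		panic(err)
	}
	fmt.Println(id)
}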
func (ws *Workspace) GetColumn(task *Task, name string) string {
	log.Info("GetColumn: %s => %s", task.Name, name)

	col := ws.Columns[name]

	// A column is a single-entry map of function name => args; take the first entry
	var fn []string
	var nm string
	for n, args := range col {
		nm = n
		fn = args
		break
	}

	return ws.ExecFunction(task, nm, fn...)
}
func LoadConfig(global string, workspaces []string) {
	// Load global environment
	log.Info("Loading global environment file: %s", global)
	cfg, err := LoadConfigFile(global)
	if err != nil {
		log.Error("Error loading global configuration: %s", err.Error())
	}
	if cfg != nil {
		GlobalConfigWorkspace = cfg
	}

	// Load workspaces
	for _, conf := range workspaces {
		log.Info("Loading workspace file: %s", conf)
		cfg, err := LoadConfigFile(conf)
		if err != nil {
			log.Error("Error loading workspace: %s", err.Error())
		}
		if cfg != nil {
			ConfigWorkspaces[cfg.Name] = cfg
		}
	}
}
func (s *Session) readSessionData() {
	sd, ok := s.Request.Cookies["__SD"]
	if ok && sd.Value != "" {
		log.Info("Retrieved session data (__SD): %s", sd.Value)
		sdata := make(map[string]string)
		err := json.Unmarshal([]byte(fromBase64(sd.Value)), &sdata)
		if err != nil {
			s.RenderException(500, err)
			return
		}
		s.SessionData = sdata
	}
}
func runLoop() {
	conf, err := readConf()
	AssertNoErr(err, "Failed to read config file")
	log.Debug("Global conf: %#v", conf)

	nif := conf["config"]["if"]
	interval := atoi(conf["config"]["interval"], 5)

	for name := range notifyOnChange(nif, interval) {
		log.Info("Network changed: %s", name)
		if section, ok := conf["ssid:"+name]; ok {
			log.Debug("Found section: %v", section)
			ApplyCmds(section)
		} else {
			log.Debug("Undefined section for: %s", name)
		}
	}
}
func ParseCPANLine(line string) (*Dependency, error) {
	if len(line) == 0 {
		return nil, nil
	}

	matches := re.FindStringSubmatch(line)
	if len(matches) == 0 {
		log.Trace("Unable to parse line: %s", line)
		return nil, nil
	}

	module := matches[2]
	version := strings.Replace(matches[4], " ", "", -1)
	comment := matches[5]

	dependency, err := DependencyFromString(module, version)
	if err != nil {
		// Check the error before touching dependency below to avoid a nil dereference
		return nil, err
	}

	if strings.HasPrefix(strings.Trim(comment, " "), "# REQS: ") {
		comment = strings.TrimPrefix(strings.Trim(comment, " "), "# REQS: ")
		log.Trace("Found additional dependencies: %s", comment)
		for _, req := range strings.Split(comment, ";") {
			req = strings.Trim(req, " ")
			bits := strings.Split(req, "-")
			if len(bits) < 2 {
				// Expect Name-Version pairs; skip anything that doesn't match
				log.Error("Error parsing REQS dependency: %s", req)
				continue
			}
			new_dep, err := DependencyFromString(bits[0], bits[1])
			if err != nil {
				log.Error("Error parsing REQS dependency: %s", req)
				continue
			}
			log.Trace("Added dependency: %s", new_dep)
			dependency.Additional = append(dependency.Additional, new_dep)
		}
	}

	log.Info("%s (%s %s)", module, dependency.Modifier, dependency.Version)
	return dependency, nil
}
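// A minimal standalone sketch of the "# REQS:" comment convention handled above:
// semicolon-separated Name-Version pairs that become additional dependencies. The
// cpanfile regex `re` itself is not part of this excerpt, so only the comment-parsing
// step is illustrated, mirroring the bits[0]/bits[1] split used in the real code; the
// sample comment string is an assumption for illustration.
package main

import (
	"fmt"
	"strings"
)

func main() {
	comment := "# REQS: Carp-1.50; JSON-4.02"
	comment = strings.TrimPrefix(strings.Trim(comment, " "), "# REQS: ")
	for _, req := range strings.Split(comment, ";") {
		req = strings.Trim(req, " ")
		bits := strings.Split(req, "-")
		if len(bits) < 2 {
			continue
		}
		name, version := bits[0], bits[1]
		fmt.Printf("additional dependency: %s => %s\n", name, version)
	}
}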
func (s *Source) Find(d *Dependency) (*Module, error) { log.Debug("Finding dependency: %s", d) switch s.Type { case "SmartPAN": log.Debug("=> Using SmartPAN source") url := s.URL if !strings.HasSuffix(s.URL, "/") { url += "/" } url += "where/" + d.Name + "/" + d.Modifier + d.Version log.Info("Query: %s", url) res, err := http.Get(url) if err != nil { log.Error("Error querying SmartPAN: %s", err.Error()) return nil, err } defer res.Body.Close() body, err := ioutil.ReadAll(res.Body) log.Trace("Got response: %s", string(body)) if res.StatusCode != http.StatusOK { log.Info("Module not found in SmartPAN: %s", d.Name) return nil, nil } var v *WhereOutput if err = json.Unmarshal(body, &v); err != nil { log.Error("Error parsing JSON: %s", err.Error()) return nil, err } log.Trace("Found module %s", v.Module) if len(v.Versions) == 0 { log.Info("Found module but no versions returned") return nil, nil } var lv *VersionOutput for _, ver := range v.Versions { if ver.Version == v.Latest { log.Info("Using latest version of %s: %f", v.Module, ver.Version) lv = ver break } } if lv == nil { log.Info("Couldn't find latest version, selecting first available") lv = v.Versions[0] } return &Module{ Name: d.Name, Version: fmt.Sprintf("%f", lv.Version), Source: s, Url: lv.URL, }, nil case "CPAN": log.Debug("=> Using CPAN source") if mod, ok := s.ModuleList[d.Name]; ok { log.Trace("=> Found in source: %s", mod) if d.Matches(mod) { log.Trace("=> Version (%s) matches dependency: %s", mod.Version, d) return mod, nil } log.Trace("=> Version (%s) doesn't match dependency: %s", mod.Version, d) return nil, nil } case "BackPAN": log.Debug("=> Using BackPAN source") // TODO better version matching - new backpan index? if mod, ok := s.ModuleList[d.Name+"-"+d.Version]; ok { log.Trace("=> Found in source: %s", mod) if d.Matches(mod) { log.Trace("=> Version (%s) matches dependency: %s", mod.Version, d) return mod, nil } log.Trace("=> Version (%s) doesn't match dependency: %s", mod.Version, d) return nil, nil } case "MetaCPAN": log.Debug("=> Using MetaCPAN source") var sout, serr bytes.Buffer var cpanm_args string = fmt.Sprintf("-L %s --info %s~\"%s%s\"", config.InstallDir, d.Name, d.Modifier, d.Version) cpanm_cache_dir, err := filepath.Abs(config.CacheDir) if err != nil { log.Error("Failed to get absolute path of gopan cache directory: %s", err) return nil, err } log.Trace("About to exec: cpanm %s", cpanm_args) os.Setenv("CPANM_INFO_ARGS", cpanm_args) os.Setenv("PERL_CPANM_HOME", cpanm_cache_dir) cmd := exec.Command("bash", "-c", `eval cpanm $CPANM_INFO_ARGS`) cmd.Stdout = &sout cmd.Stderr = &serr if err := cmd.Run(); err != nil { log.Error("cpanm %s: %s,\n%s\n", cpanm_args, err, serr.String()) return nil, nil } if 0 == len(sout.String()) { log.Warn("No author/module from cpanm") return nil, nil } author_module := strings.TrimRight(sout.String(), "\n") mematches := metacpanRe.FindStringSubmatch(author_module) if nil == mematches { log.Error("Match failed for: %s", author_module) return nil, nil } log.Trace("Resolved: %s", author_module) for _, mesource := range config.MetaSources { meurl := fmt.Sprintf("authors/id/%s/%s/%s", mematches[1][0:1], mematches[1][0:2], mematches[0]) archive_url := fmt.Sprintf("%s/%s", mesource.URL, meurl) log.Trace("Checking: " + archive_url) resp, err := http.Head(archive_url) if err != nil { log.Trace(err) continue } log.Trace("HEAD status code: %d", resp.StatusCode) if 200 == resp.StatusCode { // No module/version check since 'cpanm --info' may resolve to // archive and version that may not match 
source return &Module{ Name: mematches[2], Version: mematches[3], Source: mesource, Url: meurl, }, nil } } log.Error("Could not get archive URL via 'cpanm %s'", cpanm_args) return nil, nil default: log.Error("Unrecognised source type: %s", s.Type) return nil, errors.New(fmt.Sprintf("Unrecognised source: %s", s)) } log.Trace("=> Not found in source") return nil, nil }
func LoadIndex(index string) map[string]*Source { indexes := make(map[string]*Source) log.Info("Loading cached index file %s", index) if _, err := os.Stat(index); err != nil { log.Error("Cached index file not found") return indexes } var bytes []byte if strings.HasSuffix(index, ".gz") { fi, err := os.Open(index) if err != nil { log.Error("Error reading index: %s", err.Error()) return indexes } defer fi.Close() gz, err := gzip.NewReader(fi) if err != nil { log.Error("Error creating gzip reader: %s", err.Error()) return indexes } bytes, err = ioutil.ReadAll(gz) if err != nil { log.Error("Error reading from gzip: %s", err.Error()) return indexes } } else { var err error bytes, err = ioutil.ReadFile(index) if err != nil { log.Error("Error reading index: %s", err.Error()) return indexes } } lines := strings.Split(string(bytes), "\n") var csource *Source var cauth *Author var cpkg *Package resrcauth := regexp.MustCompile("^\\s*(.*)\\s\\[(.*)\\]\\s*$") repackage := regexp.MustCompile("^\\s*(.*)\\s=>\\s(.*)\\s*$") reprovides := regexp.MustCompile("^\\s*(.*)\\s\\((.*)\\):\\s(.*)\\s*$") for _, l := range lines { log.Trace("Line: %s", l) if strings.HasPrefix(l, " ") { // provides log.Trace("=> Provides") match := reprovides.FindStringSubmatch(l) if len(match) > 0 { if strings.HasPrefix(match[1], "-") { log.Trace(" - Is a removal") match[1] = strings.TrimPrefix(match[1], "-") if _, ok := cpkg.Provides[match[1]]; ok { delete(cpkg.Provides, match[1]) } if len(cpkg.Provides) == 0 { delete(cauth.Packages, cpkg.Name) cpkg = nil } if len(cauth.Packages) == 0 { delete(csource.Authors, cauth.Name) cauth = nil } if len(csource.Authors) == 0 { delete(indexes, csource.Name) csource = nil } } else { cpkg.Provides[match[1]] = &PerlPackage{ Name: match[1], Version: match[2], Package: cpkg, File: match[3], } } } } else if strings.HasPrefix(l, " ") { // its a package log.Trace("=> Package") match := repackage.FindStringSubmatch(l) if len(match) > 0 { if strings.HasPrefix(match[1], "-") { log.Trace(" - Is a removal") match[1] = strings.TrimPrefix(match[1], "-") if _, ok := cauth.Packages[match[1]]; ok { delete(cauth.Packages, match[1]) } if len(cauth.Packages) == 0 { delete(csource.Authors, cauth.Name) cauth = nil } if len(csource.Authors) == 0 { delete(indexes, csource.Name) csource = nil } } else { if _, ok := cauth.Packages[match[1]]; ok { // we've seen this package before log.Trace("Seen this package before: %s", match[1]) cpkg = cauth.Packages[match[1]] continue } cpkg = &Package{ Name: match[1], URL: match[2], Author: cauth, Provides: make(map[string]*PerlPackage), } cauth.Packages[match[1]] = cpkg } } } else if strings.HasPrefix(l, " ") { // its an author log.Trace("=> Author") match := resrcauth.FindStringSubmatch(l) if len(match) > 0 { if strings.HasPrefix(match[1], "-") { log.Trace(" - Is a removal") match[1] = strings.TrimPrefix(match[1], "-") if _, ok := csource.Authors[match[1]]; ok { delete(csource.Authors, match[1]) } if len(csource.Authors) == 0 { delete(indexes, csource.Name) csource = nil } } else { if _, ok := csource.Authors[match[1]]; ok { // we've seen this author before log.Trace("Seen this author before: %s", match[1]) cauth = csource.Authors[match[1]] continue } cauth = &Author{ Name: match[1], URL: match[2], Source: csource, Packages: make(map[string]*Package, 0), } csource.Authors[match[1]] = cauth } } } else { // its a source log.Trace("=> Source") match := resrcauth.FindStringSubmatch(l) if len(match) > 0 { if strings.HasPrefix(match[1], "-") { log.Trace(" - Is a removal") match[1] = 
strings.TrimPrefix(match[1], "-") if _, ok := indexes[match[1]]; ok { delete(indexes, match[1]) } } else { seen := false for _, idx := range indexes { if idx.Name == match[1] { // we've seen this source before log.Trace("Seen this source before: %s", idx.Name) csource = idx seen = true break } } if seen { continue } csource = &Source{ Name: match[1], URL: match[2], Authors: make(map[string]*Author, 0), } indexes[csource.Name] = csource } } } } for _, source := range indexes { log.Trace(source.Name) for _, author := range source.Authors { log.Trace(" %s", author.Name) for _, pkg := range author.Packages { log.Trace(" %s => %s", pkg.Name, pkg.URL) } } } return indexes }
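// For orientation, the cached index parsed by LoadIndex above is a plain-text hierarchy:
// a source line ("Name [URL]"), indented author lines ("Name [URL]"), further-indented
// package lines ("archive => URL"), and provides lines ("Module (version): file"); a
// leading "-" on an entry marks a removal. The parser distinguishes levels purely by the
// number of leading spaces, and the exact widths shown below are assumptions for
// illustration, not taken from the original code:
//
// CPAN [http://www.cpan.org/authors/id]
//   ETHER [http://www.cpan.org/authors/id/E/ET/ETHER]
//     Moose-2.2015.tar.gz => http://www.cpan.org/authors/id/E/ET/ETHER/Moose-2.2015.tar.gz
//       Moose (2.2015): Moose-2.2015/lib/Moose.pm
//       -Moose::Role (2.2015): Moose-2.2015/lib/Moose/Role.pm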
func where(session *http.Session) { module := session.Stash["module"].(string) log.Info("Looking for module: %s", module) ns := strings.Split(module, "::") if _, ok := packages[ns[0]]; !ok { log.Info("Top-level namespace [%s] not found", ns[0]) session.Response.Status = 404 session.Response.Send() return } mod := packages[ns[0]] ns = ns[1:] for len(ns) > 0 { if _, ok := mod.Children[ns[0]]; !ok { log.Info("Child namespace [%s] not found", ns[0]) session.Response.Status = 404 session.Response.Send() return } log.Info("Found child namespace [%s]", ns[0]) mod = mod.Children[ns[0]] ns = ns[1:] } var version string if _, ok := session.Stash["version"]; ok { version = session.Stash["version"].(string) if strings.HasPrefix(version, "v") { version = strings.TrimPrefix(version, "v") } log.Info("Looking for version: %s", version) } if len(mod.Versions) == 0 { log.Info("Module has no versions in index") session.Response.Status = 404 session.Response.Send() return } versions := make([]*VersionOutput, 0) lv := float64(0) if len(version) > 0 { if ">0" == version { // Take account of packages that have a version of 'undef' for _, pkg := range mod.Packages { packageURL := pkg.Package.VirtualURL() urlmatches := packageUrlRe.FindStringSubmatch(packageURL) if nil == urlmatches { log.Info("Version requested [%s] not found", version) session.Response.Status = 404 session.Response.Send() return } ver := gopan.VersionFromString(urlmatches[2]) log.Info("Found version: %f", ver) versions = append(versions, &VersionOutput{ Index: pkg.Package.Author.Source.Name, URL: packageURL, Path: pkg.Package.AuthorURL(), Version: ver, }) if ver > lv { lv = ver } } } else { dep, _ := getpan.DependencyFromString(module, version) for _, md := range mod.Versions { var sver string = md.Version if `undef` == md.Version { sver = fmt.Sprintf("%.2f", md.Package.Version()) } log.Info("Matching [%s] against derived version [%s] (md.Version [%s], md.Package.Version [%f])", dep.Version, sver, md.Version, md.Package.Version()) if dep.MatchesVersion(sver) { vout := &VersionOutput{ Index: md.Package.Author.Source.Name, URL: md.Package.VirtualURL(), Path: md.Package.AuthorURL(), Version: gopan.VersionFromString(sver), } versions = append(versions, vout) ver := gopan.VersionFromString(sver) if ver > lv { lv = ver } } } } if len(versions) == 0 { log.Info("Version requested [%s] not found", version) session.Response.Status = 404 session.Response.Send() return } } else { for v, pkg := range mod.Versions { log.Info("Found version: %f", v) versions = append(versions, &VersionOutput{ Index: pkg.Package.Author.Source.Name, URL: pkg.Package.VirtualURL(), Path: pkg.Package.AuthorURL(), Version: gopan.VersionFromString(pkg.Version), }) if v > lv { lv = v } } } session.Response.Headers.Set("Content-Type", "application/json") o := &WhereOutput{ Module: mod.FullName(), Latest: lv, Versions: versions, } b, err := json.MarshalIndent(o, "", " ") log.Info("Output: %s", string(b)) if err != nil { log.Error("Failed encoding JSON: %s", err.Error()) session.Response.Status = 500 session.Response.Send() return } session.Response.Status = 200 session.Response.Write(b) }
func main() {
	config = getpan.Configure()
	config.Dump()

	mods := flag.Args()

	if len(mods) == 0 {
		if _, err := os.Stat(config.CPANFile); os.IsNotExist(err) {
			log.Error("cpanfile not found: %s", config.CPANFile)
			os.Exit(1)
		}
	}

	if len(mods) > 0 && mods[0] == "exec" {
		log.Debug("getpan exec => " + strings.Join(mods[1:], " "))
		cmd := exec.Command(mods[1], mods[2:]...)
		cmd.Env = os.Environ()
		cmd.Env = append(cmd.Env, "PERL5LIB="+config.InstallDir+"/lib/perl5")
		cmd.Env = append(cmd.Env, "PATH="+os.Getenv("PATH")+":"+config.InstallDir+"/bin")
		cmd.Stdout = os.Stdout
		cmd.Stderr = os.Stderr
		err := cmd.Run()
		if err != nil {
			// debug so it doesn't show up in stdout/stderr unless -loglevel is used
			log.Debug("Error in exec: %s", err.Error())
			os.Exit(10)
		}
		return
	}

	for _, source := range config.Sources {
		err := source.Load()
		if err != nil {
			log.Error("Error loading sources: %s", err)
			os.Exit(1)
			return
		}
	}

	var deps *getpan.DependencyList
	if len(mods) == 0 {
		log.Info("Installing from cpanfile: %s", config.CPANFile)
		d, err := getpan.ParseCPANFile(config.CPANFile)
		if err != nil {
			log.Error("Error parsing cpanfile: %s", err)
			os.Exit(2)
			return
		}
		deps = &d.DependencyList
	} else {
		log.Info("Installing from command line args")
		deps = &getpan.DependencyList{
			Dependencies: make([]*getpan.Dependency, 0),
		}
		for _, arg := range mods {
			dependency, err := getpan.DependencyFromString(arg, "")
			if err != nil {
				log.Error("Unable to parse input: %s", arg)
				continue
			}
			deps.AddDependency(dependency)
		}
	}

	err := deps.Resolve()
	if err != nil {
		log.Error("Error resolving dependencies: %s", err)
		os.Exit(3)
		return
	}

	if !config.NoDepdump {
		log.Info("Resolved dependency tree:")
		deps.PrintDeps(0)
	}

	if config.NoInstall {
		log.Info("Skipping installation phase")
		return
	}

	_, err = deps.Install()
	if err != nil {
		log.Error("Error installing dependencies: %s", err)
		os.Exit(4)
		return
	}

	// FIXME hacky, need a better way of tracking installed deps
	log.Info("Successfully installed %d modules", deps.UniqueInstalled())
}
func main() { log.Logger().SetAppender(NewAppender()) global := "websysd.json" flag.StringVar(&global, "global", global, "global environment configuration") workspaces := make([]string, 0) flag.Var((*AppendSliceValue)(&workspaces), "workspace", "websysd workspace file (can be specified multiple times), defaults to './workspace.json'") // Create our Gotcha application var app = gotcha.Create(Asset) if len(workspaces) == 0 { workspaces = append(workspaces, "./workspace.json") } LoadConfig(global, workspaces) GlobalWorkspace = NewWorkspace(GlobalConfigWorkspace.Name, GlobalConfigWorkspace.Environment, make(map[string]map[string][]string), GlobalConfigWorkspace.InheritEnvironment) for fn, args := range GlobalConfigWorkspace.Functions { log.Info("=> Creating global function: %s", fn) GlobalWorkspace.Functions[fn] = &Function{ Name: fn, Args: args.Args, Command: args.Command, Executor: args.Executor, } } if GlobalWorkspace.InheritEnvironment { log.Info("=> Inheriting process environment into global workspace") for _, k := range os.Environ() { p := strings.SplitN(k, "=", 2) if strings.TrimSpace(p[0]) == "" { log.Warn("Skipping empty environment key") continue } log.Info(" %s = %s", p[0], p[1]) // TODO variable subst for current env vars if _, ok := GlobalWorkspace.Environment[p[0]]; !ok { GlobalWorkspace.Environment[p[0]] = p[1] } } } for _, ws := range ConfigWorkspaces { log.Info("=> Creating workspace: %s", ws.Name) var workspace *Workspace if wks, ok := Workspaces[ws.Name]; ok { log.Warn("Workspace %s already exists, merging tasks and environment") workspace = wks } else { workspace = NewWorkspace(ws.Name, ws.Environment, ws.Columns, ws.InheritEnvironment) Workspaces[ws.Name] = workspace } workspace.IsLocked = ws.IsLocked if workspace.InheritEnvironment && !GlobalWorkspace.InheritEnvironment { log.Info("=> Inheriting process environment into workspace") for _, k := range os.Environ() { p := strings.SplitN(k, "=", 2) if strings.TrimSpace(p[0]) == "" { log.Warn("Skipping empty environment key") continue } log.Info(" %s = %s", p[0], p[1]) // TODO variable subst for current env vars if _, ok := workspace.Environment[p[0]]; !ok { workspace.Environment[p[0]] = p[1] } } } for fn, args := range ws.Functions { log.Info("=> Creating workspace function: %s", fn) workspace.Functions[fn] = &Function{ Name: fn, Args: args.Args, Command: args.Command, Executor: args.Executor, } } for _, t := range ws.Tasks { log.Info("=> Creating task: %s", t.Name) if _, ok := workspace.Tasks[t.Name]; ok { log.Warn("Task %s already exists, overwriting") } env := make(map[string]string) for k, v := range GlobalWorkspace.Environment { env[k] = v } for k, v := range ws.Environment { env[k] = v } for k, v := range t.Environment { env[k] = v } task := NewTask(workspace, t.Name, t.Executor, t.Command, env, t.Service, t.Stdout, t.Stderr, t.Metadata, t.Pwd) workspace.Tasks[t.Name] = task } } // Get the router r := app.Router // Create some routes r.Get("/", list_workspaces) r.Get("/favicon.ico", r.Static("assets/favicon.ico")) r.Get("/log", show_log) r.Get("/workspace/(?P<workspace>[^/]+)", list_tasks) // Serve static content (but really use a CDN) r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}")) r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}")) r.Post("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)/start", startTask) r.Post("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)/stop", stopTask) r.Post("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)/enable", enableServiceTask) 
r.Post("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)/disable", disableServiceTask) r.Get("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)", taskHistory) r.Get("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)/run/(?P<run>\\d+)", taskRun) r.Get("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)/run/(?P<run>\\d+)/stdout", taskRunStdout) r.Get("/workspace/(?P<workspace>[^/]+)/task/(?P<task>[^/]+)/run/(?P<run>\\d+)/stderr", taskRunStderr) // Start our application app.Start() defer func() { for _, ws := range Workspaces { for _, t := range ws.Tasks { if t.ActiveTask != nil && t.ActiveTask.Cmd != nil && t.ActiveTask.Cmd.Process != nil { t.ActiveTask.Cmd.Process.Kill() } } } }() <-make(chan int) }
func (tr *TaskRun) Start(exitCh chan int) { tr.Started = time.Now() stdout, err := tr.Cmd.StdoutPipe() if err != nil { tr.Error = err exitCh <- 1 return } stderr, err := tr.Cmd.StderrPipe() if err != nil { tr.Error = err exitCh <- 1 return } if len(tr.Stdout) > 0 { wr, err := NewFileLogWriter(tr.Stdout) if err != nil { log.Error("Unable to open file %s: %s", tr.Stdout, err.Error()) tr.StdoutBuf = NewInMemoryLogWriter() } else { tr.StdoutBuf = wr } } else { tr.StdoutBuf = NewInMemoryLogWriter() } if len(tr.Stderr) > 0 { wr, err := NewFileLogWriter(tr.Stderr) if err != nil { log.Error("Unable to open file %s: %s", tr.Stderr, err.Error()) tr.StderrBuf = NewInMemoryLogWriter() } else { tr.StderrBuf = wr } } else { tr.StderrBuf = NewInMemoryLogWriter() } if len(tr.Pwd) > 0 { log.Info("Setting pwd: %s", tr.Pwd) tr.Cmd.Dir = tr.Pwd } for k, v := range tr.Environment { log.Info("Adding env var %s = %s", k, v) tr.Cmd.Env = append(tr.Cmd.Env, k+"="+v) } err = tr.Cmd.Start() if tr.Cmd.Process != nil { ev := &Event{time.Now(), fmt.Sprintf("Process %d started: %s", tr.Cmd.Process.Pid, tr.Command)} log.Info(ev.Message) tr.Events = append(tr.Events, ev) } if err != nil { tr.Error = err log.Error(err.Error()) tr.StdoutBuf.Close() tr.StderrBuf.Close() exitCh <- 1 return } go func() { go io.Copy(tr.StdoutBuf, stdout) go io.Copy(tr.StderrBuf, stderr) tr.Cmd.Wait() tr.StdoutBuf.Close() tr.StderrBuf.Close() log.Trace("STDOUT: %s", tr.StdoutBuf.String()) log.Trace("STDERR: %s", tr.StderrBuf.String()) ps := tr.Cmd.ProcessState sy := ps.Sys().(syscall.WaitStatus) ev := &Event{time.Now(), fmt.Sprintf("Process %d exited with status %d", ps.Pid(), sy.ExitStatus())} log.Info(ev.Message) tr.Events = append(tr.Events, ev) log.Info(ps.String()) tr.Stopped = time.Now() exitCh <- 1 }() }
func main() {
	configure()

	log.Logger().SetLevel(log.Stol(config.LogLevel))
	log.Info("Using log level: %s", config.LogLevel)

	indexes = make(map[string]map[string]*gopan.Source)

	if !config.NoCache {
		indexes[config.Index] = gopan.LoadIndex(config.CacheDir + "/" + config.Index)
	}
	if _, ok := indexes[config.Index]; !ok {
		// Ensure the inner map exists when -nocache is used and LoadIndex was skipped
		indexes[config.Index] = make(map[string]*gopan.Source)
	}

	if config.NoCache || config.Update {
		for _, s := range config.Sources {
			b := strings.SplitN(s, "=", 2)
			if len(b) < 2 {
				log.Error("Expected Name=URL pair, got: %s", s)
				return
			}
			if idx, ok := indexes[config.Index][b[0]]; ok {
				log.Warn("Index [%s] already exists with URL [%s], updating to [%s]", b[0], idx.URL, b[1])
				idx.URL = b[1]
			} else {
				indexes[config.Index][b[0]] = &gopan.Source{
					Name:    b[0],
					URL:     b[1],
					Authors: make(map[string]*gopan.Author, 0),
				}
			}
		}

		if len(config.Sources) == 0 && !config.CPAN && !config.BackPAN {
			log.Debug("No -source, -cpan, -backpan parameters, adding default CPAN/BackPAN")
			config.CPAN = true
			config.BackPAN = true
		}

		if config.CPAN {
			if _, ok := indexes[config.Index]["CPAN"]; !ok {
				log.Debug("Adding CPAN index")
				indexes[config.Index]["CPAN"] = gopan.CPANSource()
			} else {
				log.Debug("CPAN index already exists")
			}
		}

		if config.BackPAN {
			if _, ok := indexes[config.Index]["BackPAN"]; !ok {
				log.Debug("Adding BackPAN index")
				indexes[config.Index]["BackPAN"] = gopan.BackPANSource()
			} else {
				log.Debug("BackPAN index already exists")
			}
		}

		log.Info("Using sources:")
		for fname := range indexes {
			log.Info("From %s", fname)
			for _, source := range indexes[fname] {
				log.Info("=> %s", source.String())
			}
		}

		newAuthors := getAuthors()
		newPackages := getPackages()

		os.MkdirAll(config.CacheDir, 0777)

		if !config.NoCache {
			gopan.SaveIndex(config.CacheDir+"/"+config.Index, indexes[config.Index])
		}

		if config.Update {
			log.Info("Found %d new packages by %d new authors", newPackages, newAuthors)
		}
	}

	nsrc, nauth, nmod, npkg := gopan.CountIndex(indexes)
	log.Info("Found %d packages in %d modules by %d authors from %d sources", npkg, nmod, nauth, nsrc)

	if !config.NoMirror {
		mirrorPan()
	}
}
func main() { configure() indexes = make(map[string]map[string]*gopan.Source) indexes[config.InputIndex] = gopan.LoadIndex(config.CacheDir + "/" + config.InputIndex) log.Logger().SetLevel(log.Stol(config.LogLevel)) log.Info("Using log level: %s", config.LogLevel) // FIXME inefficient _, _, tpkg, _ := gopan.CountIndex(indexes) npkg := 0 nmod := 0 var pc = func() float64 { return float64(nmod) / float64(tpkg) * 100 } log.Info("Writing packages index file") out, err := os.Create(config.CacheDir + "/" + config.OutputIndex) if err != nil { log.Error("Error creating packages index: %s", err.Error()) return } for fname, _ := range indexes { log.Debug("File: %s", fname) for _, idx := range indexes[fname] { log.Debug("Index: %s", idx) out.Write([]byte(idx.Name + " [" + idx.URL + "]\n")) for _, auth := range idx.Authors { log.Debug("Author %s", auth) out.Write([]byte(" " + auth.Name + " [" + auth.URL + "]\n")) for _, pkg := range auth.Packages { out.Write([]byte(" " + pkg.Name + " => " + pkg.URL + "\n")) log.Debug("Package: %s", pkg) if !config.Flatten { if len(pkg.Provides) == 0 { // TODO better handling of filenames modnm := strings.TrimSuffix(pkg.Name, ".tar.gz") tgzpath := config.CacheDir + "/" + idx.Name + "/" + auth.Name[:1] + "/" + auth.Name[:2] + "/" + auth.Name + "/" + pkg.Name if _, err := os.Stat(tgzpath); err != nil { log.Error("File not found: %s", tgzpath) return } extpath := config.ExtDir + "/" + idx.Name + "/" + auth.Name[:1] + "/" + auth.Name[:2] + "/" + auth.Name + "/" + modnm dirpath := config.ExtDir + "/" + idx.Name + "/" + auth.Name[:1] + "/" + auth.Name[:2] + "/" + auth.Name log.Trace("=> tgzpath: %s", tgzpath) log.Trace(" > extpath: %s", extpath) log.Trace(" > dirpath: %s", dirpath) // Only index packages if they don't already exist if err := pandex.Provides(pkg, tgzpath, extpath, dirpath); err != nil { log.Error("Error retrieving package list: %s", err) continue } } npkg += len(pkg.Provides) nmod += 1 for p, pk := range pkg.Provides { out.Write([]byte(" " + p + " (" + pk.Version + "): " + pk.File + "\n")) } if nmod > 0 && nmod%100 == 0 { log.Info("%f%% Done %d/%d packages (%d provided so far)", pc(), nmod, tpkg, npkg) } } } } } } out.Close() log.Info("Found %d packages from %d modules", npkg, nmod) }
func (r *Response) EventStream() chan []byte { c := make(chan []byte) r.IsEventStream = true r.Headers.Add("Content-Type", "text/event-stream") r.Headers.Add("Cache-Control", "no-cache") r.Headers.Add("Connection", "keep-alive") //r.Write([]byte("\n\n")) r.Send() hj, ok := r.writer.(nethttp.Hijacker) if !ok { log.Warn("Connection unsuitable for hijack") return nil } conn, bufrw, err := hj.Hijack() if err != nil { log.Warn("Connection hijack failed") return nil } r.esBufrw = bufrw r.esConn = conn go func() { for b := range c { if len(b) == 0 { log.Trace("Event stream ended") r.esConn.Close() break } lines := strings.Split(string(b), "\n") data := "" for _, l := range lines { data += "data: " + l + "\n" } data += "\n" sz := len(data) + 1 log.Info("Event stream message is %d bytes", sz) size := fmt.Sprintf("%X", sz) r.esBufrw.Write([]byte(size + "\r\n")) lines = strings.Split(data, "\n") for _, ln := range lines { r.esBufrw.Write([]byte(ln + "\n")) } _, err := r.esBufrw.Write([]byte("\r\n")) if err != nil { log.Error("Error writing to connection: %s\n", err) r.esConn.Close() break } err = r.esBufrw.Flush() if err != nil { log.Error("Error flushing buffer: %s\n", err) r.esConn.Close() break } } }() return c }
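// A minimal standalone sketch of Server-Sent Events using http.Flusher instead of
// hijacking the connection and hand-writing chunked frames as EventStream does above.
// The handler, route, and event loop below are illustrative assumptions, not part of
// the original code; only the "data: ...\n\n" framing matches the SSE wire format.
package main

import (
	"fmt"
	"net/http"
	"time"
)

func sse(w http.ResponseWriter, req *http.Request) {
	flusher, ok := w.(http.Flusher)
	if !ok {
		http.Error(w, "streaming unsupported", http.StatusInternalServerError)
		return
	}
	w.Header().Set("Content-Type", "text/event-stream")
	w.Header().Set("Cache-Control", "no-cache")
	w.Header().Set("Connection", "keep-alive")

	for i := 0; i < 3; i++ {
		// Each message is one or more "data:" lines followed by a blank line
		fmt.Fprintf(w, "data: message %d\n\n", i)
		flusher.Flush()
		time.Sleep(time.Second)
	}
}

func main() {
	http.HandleFunc("/events", sse)
	http.ListenAndServe(":8080", nil)
}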
func (s *settings) ValidateAndUseFile(filePath string) {
	cfg := loadConfigFile(filePath)
	cfg.Validate()
	s.c = cfg
	log.Info("Config loaded " + filePath)
}
func examplepost(session *http.Session) {
	m := &ExampleForm{}
	session.Stash["fh"] = form.New(session, m).Populate(false).Validate()
	log.Info("Got posted title: %s", m.Title)
	example(session)
}
func do_import(session *http.Session, job *ImportJob) { log.Info("Running import job %s", job.Id) reponame := job.Form.ImportInto if reponame == "new_index" { reponame = job.Form.NewIndex } msg := func(m string) { if m != ":DONE" { job.History = append(job.History, m) log.Info(m) } for _, w := range job.Watchers { w(m) } } mods := make([]*getpan.Module, 0) // TODO cpanm mirror when using getpan_import if len(job.Form.Cpanfile) > 0 { msg("Parsing cpanfile input") _, modules := getpan_import(job, msg) mods = append(mods, modules...) } if len(job.Form.ImportURL) > 0 { msg("Importing from URL: " + job.Form.ImportURL) // TODO support cpanfile urls nauth := job.Form.AuthorID if len(nauth) < 3 { // FIXME move to form validation msg("Author ID must be at least 3 characters") msg(":DONE") job.Complete = true return } npath := config.CacheDir + "/authors/id/" + nauth[:1] + "/" + nauth[:2] + "/" + nauth _, fn := filepath.Split(job.Form.ImportURL) nfile := npath + "/" + fn msg("Caching to " + nfile) if _, err := os.Stat(nfile); err != nil { os.MkdirAll(npath, 0777) out, err := os.Create(nfile) if err != nil { msg(err.Error()) msg(":DONE") job.Complete = true return } url := job.Form.ImportURL log.Trace("Downloading: %s", url) resp, err := nethttp.Get(url) if err != nil { msg(err.Error()) msg(":DONE") job.Complete = true return } _, err = io.Copy(out, resp.Body) if err != nil { msg(err.Error()) msg(":DONE") job.Complete = true return } out.Close() resp.Body.Close() } else { log.Trace("File already exists in cache: %s", nfile) } fn = strings.TrimSuffix(fn, ".tar.gz") bits := strings.Split(fn, "-") name := strings.Join(bits[0:len(bits)-1], "-") version := bits[len(bits)-1] s := getpan.NewSource("CPAN", "/modules/02packages.details.txt.gz", "") m := &getpan.Module{ Source: s, Name: name, Version: version, Url: "/authors/id/" + nauth[:1] + "/" + nauth[:2] + "/" + nauth + "/" + fn, Cached: nfile, Dir: npath, } m.Deps = &getpan.DependencyList{ Parent: m, Dependencies: make([]*getpan.Dependency, 0), } mods = append(mods, m) } if len(job.Form.FromDir) > 0 { msg("Importing from local directory: " + job.Form.FromDir) // TODO support cpanfile paths nauth := job.Form.AuthorID if len(nauth) < 3 { // FIXME move to form validation msg("Author ID must be at least 3 characters") msg(":DONE") job.Complete = true return } npath := config.CacheDir + "/authors/id/" + nauth[:1] + "/" + nauth[:2] + "/" + nauth _, fn := filepath.Split(job.Form.FromDir) nfile := npath + "/" + fn msg("Caching to " + nfile) _, err := CopyFile(nfile, job.Form.FromDir) if err != nil { msg(err.Error()) msg(":DONE") job.Complete = true return } fn = strings.TrimSuffix(fn, ".tar.gz") bits := strings.Split(fn, "-") name := strings.Join(bits[0:len(bits)-1], "-") version := bits[len(bits)-1] s := getpan.NewSource("CPAN", "/modules/02packages.details.txt.gz", "") m := &getpan.Module{ Source: s, Name: name, Version: version, Url: "/authors/id/" + nauth[:1] + "/" + nauth[:2] + "/" + nauth + "/" + fn, Cached: nfile, Dir: npath, } m.Deps = &getpan.DependencyList{ Parent: m, Dependencies: make([]*getpan.Dependency, 0), } mods = append(mods, m) } if f, fh, err := session.Request.File("fromfile"); err == nil { fn := fh.Filename msg("Importing from uploaded module/cpanfile: " + fn) if !strings.HasSuffix(fn, ".tar.gz") && fn != "cpanfile" { msg("Only cpanfile and *.tar.gz files are supported") msg(":DONE") job.Complete = true return } if fn == "cpanfile" { msg("Importing cpanfile") b, _ := ioutil.ReadAll(f) f.Close() job.Form.Cpanfile = string(b) _, modules := 
getpan_import(job, msg) mods = append(mods, modules...) } else { msg("Importing .tar.gz") nauth := job.Form.AuthorID if len(nauth) < 3 { // FIXME move to form validation msg("Author ID must be at least 3 characters") msg(":DONE") job.Complete = true return } npath := config.CacheDir + "/authors/id/" + nauth[:1] + "/" + nauth[:2] + "/" + nauth _, fn = filepath.Split(fn) nfile := npath + "/" + fn msg("Caching to " + nfile) os.MkdirAll(npath, 0777) _, err := CopyToFile(nfile, f) if err != nil { msg(err.Error()) msg(":DONE") job.Complete = true return } fn = strings.TrimSuffix(fn, ".tar.gz") bits := strings.Split(fn, "-") name := strings.Join(bits[0:len(bits)-1], "-") version := bits[len(bits)-1] s := getpan.NewSource("CPAN", "/modules/02packages.details.txt.gz", "") m := &getpan.Module{ Source: s, Name: name, Version: version, Url: "/authors/id/" + nauth[:1] + "/" + nauth[:2] + "/" + nauth + "/" + fn, Cached: nfile, Dir: npath, } m.Deps = &getpan.DependencyList{ Parent: m, Dependencies: make([]*getpan.Dependency, 0), } mods = append(mods, m) } } else { // there is no file... so no error //msg("Error importing file upload: " + err.Error()) } if len(mods) == 0 { msg("Nothing to do") msg(":DONE") job.Complete = true return } msg("Adding modules to GoPAN index") for _, m := range mods { msg("=> " + m.Name + " (" + m.Cached + ")") dn, fn := filepath.Split(m.Cached) dnb := strings.Split(strings.TrimSuffix(dn, string(os.PathSeparator)), string(os.PathSeparator)) auth := dnb[len(dnb)-1] ndir := config.CacheDir + "/" + reponame + "/" + auth[:1] + "/" + auth[:2] + "/" + auth npath := ndir + "/" + fn if _, err := os.Stat(npath); err == nil { msg(" | Already exists in repository") } else { os.MkdirAll(ndir, 0777) msg(" | Copying to " + npath) _, err := CopyFile(npath, m.Cached) if err != nil { msg(" ! 
" + err.Error()) continue } } if _, ok := indexes[config.Index][reponame]; !ok { msg(" | Creating index: " + reponame) indexes[config.Index][reponame] = &gopan.Source{ Name: reponame, URL: "/authors/id", Authors: make(map[string]*gopan.Author), } mapped[reponame] = make(map[string]map[string]map[string]*gopan.Author) } if _, ok := indexes[config.Index][reponame].Authors[auth]; !ok { msg(" | Creating author: " + auth) author := &gopan.Author{ Source: indexes[config.Index][reponame], Name: auth, Packages: make(map[string]*gopan.Package), URL: "/authors/id/" + auth[:1] + "/" + auth[:2] + "/" + auth + "/", } indexes[config.Index][reponame].Authors[auth] = author if _, ok := mapped[reponame]; !ok { mapped[reponame] = make(map[string]map[string]map[string]*gopan.Author) } // author name if _, ok := mapped[reponame][author.Name[:1]]; !ok { mapped[reponame][author.Name[:1]] = make(map[string]map[string]*gopan.Author) } if _, ok := mapped[reponame][author.Name[:1]][author.Name[:2]]; !ok { mapped[reponame][author.Name[:1]][author.Name[:2]] = make(map[string]*gopan.Author) } mapped[reponame][author.Name[:1]][author.Name[:2]][author.Name] = author // wildcards if _, ok := mapped[reponame]["*"]; !ok { mapped[reponame]["*"] = make(map[string]map[string]*gopan.Author) } if _, ok := mapped[reponame]["*"]["**"]; !ok { mapped[reponame]["*"]["**"] = make(map[string]*gopan.Author) } mapped[reponame]["*"]["**"][author.Name] = author // combos if _, ok := mapped[reponame][author.Name[:1]]["**"]; !ok { mapped[reponame][author.Name[:1]]["**"] = make(map[string]*gopan.Author) } if _, ok := mapped[reponame]["*"][author.Name[:2]]; !ok { mapped[reponame]["*"][author.Name[:2]] = make(map[string]*gopan.Author) } mapped[reponame][author.Name[:1]]["**"][author.Name] = author mapped[reponame]["*"][author.Name[:2]][author.Name] = author } if _, ok := indexes[config.Index][reponame].Authors[auth].Packages[fn]; !ok { msg(" | Creating module: " + fn) indexes[config.Index][reponame].Authors[auth].Packages[fn] = &gopan.Package{ Author: indexes[config.Index][reponame].Authors[auth], Name: fn, URL: indexes[config.Index][reponame].Authors[auth].URL + fn, Provides: make(map[string]*gopan.PerlPackage), } msg(" | Getting list of packages") modnm := strings.TrimSuffix(fn, ".tar.gz") pkg := indexes[config.Index][reponame].Authors[auth].Packages[fn] if err := pandex.Provides(pkg, npath, ndir+"/"+modnm, ndir); err != nil { msg(" ! 
Error retrieving package list for " + pkg.Name + ": " + err.Error()) } //pkg := indexes[config.Index][reponame].Authors[auth].Packages[fn] msg(" | Adding packages to index") if _, ok := idxpackages[reponame]; !ok { idxpackages[reponame] = make(map[string]*PkgSpace) } filemap[pkg.AuthorURL()] = reponame for _, prov := range pkg.Provides { parts := strings.Split(prov.Name, "::") if _, ok := packages[parts[0]]; !ok { packages[parts[0]] = &PkgSpace{ Namespace: parts[0], Packages: make([]*gopan.PerlPackage, 0), Children: make(map[string]*PkgSpace), Parent: nil, Versions: make(map[float64]*gopan.PerlPackage), } } if _, ok := idxpackages[reponame][parts[0]]; !ok { idxpackages[reponame][parts[0]] = &PkgSpace{ Namespace: parts[0], Packages: make([]*gopan.PerlPackage, 0), Children: make(map[string]*PkgSpace), Parent: nil, Versions: make(map[float64]*gopan.PerlPackage), } } if len(parts) == 1 { packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov) packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov idxpackages[reponame][parts[0]].Packages = append(idxpackages[reponame][parts[0]].Packages, prov) idxpackages[reponame][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov } else { packages[parts[0]].Populate(parts[1:], prov) idxpackages[reponame][parts[0]].Populate(parts[1:], prov) } } msg(" | Writing to index file") gopan.AppendToIndex(config.CacheDir+"/"+config.Index, indexes[config.Index][reponame], indexes[config.Index][reponame].Authors[auth], indexes[config.Index][reponame].Authors[auth].Packages[fn]) } msg(" | Imported module") } nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes) // TODO should probably be in the index - needs to udpate when index changes summary = &Summary{nsrc, nauth, npkg, nprov} msg(":DONE") job.Complete = true }
func import1(session *http.Session) { session.Stash["indexes"] = indexes session.Stash["Title"] = "SmartPAN Import" m := &ImportForm{} f := form.New(session, m) session.Stash["fh"] = f log.Info("Headers: %s", session.Request.Header()) if session.Request.Method != "POST" { render_import(session) return } f.Populate(true) f.Validate() if f.HasErrors { render_import(session) return } log.Info("Importing into: %s", m.ImportInto) if m.ImportInto == "new_index" { if len(m.NewIndex) == 0 { f.HasErrors = true f.Errors["NewIndex"] = make(map[string]error) f.Errors["NewIndex"]["required"] = errors.New("Please give the new repository a name") render_import(session) return } log.Info("=> Creating new index: %s", m.NewIndex) } b := make([]byte, 20) rand.Read(b) en := base64.URLEncoding d := make([]byte, en.EncodedLen(len(b))) en.Encode(d, b) job := &ImportJob{ Form: m, Complete: false, Id: string(d), Watchers: make([]func(string), 0), } if len(m.Cpanfile) > 0 { log.Info("Got cpanfile:") log.Info(m.Cpanfile) } log.Info("=> Created import job: %s", job.Id) imports[job.Id] = job go do_import(session, job) //render_import(session) if _, ok := session.Request.Form()["stream"]; ok { session.Redirect(&url.URL{Path: "/import/" + job.Id + "/stream", RawQuery: "raw=y"}) } else { session.Redirect(&url.URL{Path: "/import/" + job.Id}) } }
func main() { configure() args := flag.Args() if len(args) > 0 && args[0] == "init" { log.Info("Initialising SmartPAN") log.Info("=> Installing Perl dependencies") // FIXME most of this is repeated from getpan/main.go cfg := getpan.DefaultConfig() cfg.CacheDir = config.CacheDir for _, source := range cfg.Sources { if err := source.Load(); err != nil { log.Error("Error loading sources: %s", err) os.Exit(1) return } } deps := &getpan.DependencyList{ Dependencies: make([]*getpan.Dependency, 0), } d1, _ := getpan.DependencyFromString("Parse::LocalDistribution", "") d2, _ := getpan.DependencyFromString("JSON::XS", "") deps.AddDependency(d1) deps.AddDependency(d2) if err := deps.Resolve(); err != nil { log.Error("Error resolving dependencies: %s", err) os.Exit(1) return } _, err := deps.Install() if err != nil { log.Error("Error installing dependencies: %s", err) os.Exit(2) return } log.Info(" - Installed %d modules", deps.UniqueInstalled()) log.Info("SmartPAN initialisation complete") return } if config.TestDeps { perldeps := gopan.TestPerlDeps() perldeps.Dump() if !perldeps.Ok { log.Error("Required perl dependencies are missing") os.Exit(1) return } } if len(args) > 0 && args[0] == "import" { if len(args) < 4 { log.Error("Invalid arguments, expecting: smartpan import FILE AUTHORID INDEX") return } fname := args[1] log.Info("Importing module from %s", fname) log.Info("Author ID: %s", args[2]) log.Info("Index : %s", args[3]) extraParams := map[string]string{ "importinto": args[3], "authorid": args[2], "newindex": "", "cpanmirror": "", "importurl": "", "fromdir": "", } if strings.HasPrefix(fname, "http://") || strings.HasPrefix(fname, "https://") { log.Info("URL: %s", fname) extraParams["importurl"] = fname request, err := newFormPostRequest(config.RemoteHost+"/import?stream=y", extraParams) if err != nil { log.Error("Create request error: %s", err.Error()) return } client := &nethttp.Client{} resp, err := client.Do(request) if err != nil { log.Error("Error connecting to host: %s", err.Error()) return } else { // TODO stream this body := &bytes.Buffer{} _, err := body.ReadFrom(resp.Body) if err != nil { log.Error("Error reading response: %s", err.Error()) return } resp.Body.Close() //log.Info("%d", resp.StatusCode) //log.Info("%s", resp.Header) log.Info("%s", body.String()) } } else { fname = strings.TrimPrefix(fname, "file://") log.Info("File: %s", fname) if _, err := os.Stat(fname); err != nil { log.Error("File not found: %s", err.Error()) return } request, err := newfileUploadRequest(config.RemoteHost+"/import?stream=y", extraParams, "fromfile", fname) if err != nil { log.Error("Create upload error: %s", err.Error()) return } client := &nethttp.Client{} resp, err := client.Do(request) if err != nil { log.Error("Error connecting to host: %s", err.Error()) return } else { // TODO stream this body := &bytes.Buffer{} _, err := body.ReadFrom(resp.Body) if err != nil { log.Error("Error reading response: %s", err.Error()) return } resp.Body.Close() //log.Info("%d", resp.StatusCode) //log.Info("%s", resp.Header) log.Info("%s", body.String()) } } return } config.CurrentRelease = CurrentRelease var wg sync.WaitGroup load_index = func(index string, file string) { indexes[index] = gopan.LoadIndex(file) } wg.Add(1) go func() { defer wg.Done() indexes = make(map[string]map[string]*gopan.Source) // Load CPAN index if fi, err := os.Stat(config.CacheDir + "/" + config.CPANIndex); err == nil { config.HasCPANIndex = true config.CPANIndexDate = fi.ModTime().String() config.CPANStatus = "Loading" wg.Add(1) go 
func() { defer wg.Done() load_index(config.CPANIndex, config.CacheDir+"/"+config.CPANIndex) config.CPANStatus = "Loaded" }() } // Load BackPAN index if fi, err := os.Stat(config.CacheDir + "/" + config.BackPANIndex); err == nil { config.HasBackPANIndex = true config.BackPANIndexDate = fi.ModTime().String() config.BackPANStatus = "Loading" wg.Add(1) go func() { defer wg.Done() load_index(config.BackPANIndex, config.CacheDir+"/"+config.BackPANIndex) config.BackPANStatus = "Loaded" }() } // Load our secondary indexes for _, idx := range config.Indexes { wg.Add(1) go func() { defer wg.Done() load_index(idx, config.CacheDir+"/"+idx) }() } // Load our primary index (this is the only index written back to) wg.Add(1) go func() { defer wg.Done() load_index(config.Index, config.CacheDir+"/"+config.Index) }() }() update_indexes = func() { wg.Wait() wg.Add(1) go func() { wg.Wait() config.ImportAvailable = true nsrc, nauth, npkg, nprov := gopan.CountIndex(indexes) // TODO should probably be in the index - needs to udpate when index changes summary = &Summary{nsrc, nauth, npkg, nprov} // Do this now so changing the level doesn't interfere with index load log.Logger().SetLevel(log.Stol(config.LogLevel)) }() defer wg.Done() // Create in-memory indexes for UI/search etc for fname, _ := range indexes { for idn, idx := range indexes[fname] { mapped[idx.Name] = make(map[string]map[string]map[string]*gopan.Author) for _, auth := range idx.Authors { // author name if _, ok := mapped[idx.Name][auth.Name[:1]]; !ok { mapped[idx.Name][auth.Name[:1]] = make(map[string]map[string]*gopan.Author) } if _, ok := mapped[idx.Name][auth.Name[:1]][auth.Name[:2]]; !ok { mapped[idx.Name][auth.Name[:1]][auth.Name[:2]] = make(map[string]*gopan.Author) } mapped[idx.Name][auth.Name[:1]][auth.Name[:2]][auth.Name] = auth // wildcards if _, ok := mapped[idx.Name]["*"]; !ok { mapped[idx.Name]["*"] = make(map[string]map[string]*gopan.Author) } if _, ok := mapped[idx.Name]["*"]["**"]; !ok { mapped[idx.Name]["*"]["**"] = make(map[string]*gopan.Author) } mapped[idx.Name]["*"]["**"][auth.Name] = auth // combos if _, ok := mapped[idx.Name][auth.Name[:1]]["**"]; !ok { mapped[idx.Name][auth.Name[:1]]["**"] = make(map[string]*gopan.Author) } if _, ok := mapped[idx.Name]["*"][auth.Name[:2]]; !ok { mapped[idx.Name]["*"][auth.Name[:2]] = make(map[string]*gopan.Author) } mapped[idx.Name][auth.Name[:1]]["**"][auth.Name] = auth mapped[idx.Name]["*"][auth.Name[:2]][auth.Name] = auth for _, pkg := range auth.Packages { filemap[pkg.AuthorURL()] = idn for _, prov := range pkg.Provides { parts := strings.Split(prov.Name, "::") log.Trace("PACKAGE: %s", prov.Name) if _, ok := packages[parts[0]]; !ok { packages[parts[0]] = &PkgSpace{ Namespace: parts[0], Packages: make([]*gopan.PerlPackage, 0), Children: make(map[string]*PkgSpace), Parent: nil, Versions: make(map[float64]*gopan.PerlPackage), } } if _, ok := idxpackages[idx.Name]; !ok { idxpackages[idx.Name] = make(map[string]*PkgSpace) } if _, ok := idxpackages[idx.Name][parts[0]]; !ok { idxpackages[idx.Name][parts[0]] = &PkgSpace{ Namespace: parts[0], Packages: make([]*gopan.PerlPackage, 0), Children: make(map[string]*PkgSpace), Parent: nil, Versions: make(map[float64]*gopan.PerlPackage), } } if len(parts) == 1 { packages[parts[0]].Packages = append(packages[parts[0]].Packages, prov) packages[parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov idxpackages[idx.Name][parts[0]].Packages = append(idxpackages[idx.Name][parts[0]].Packages, prov) 
idxpackages[idx.Name][parts[0]].Versions[gopan.VersionFromString(prov.Version)] = prov log.Trace("Version linked: %f for %s", gopan.VersionFromString(prov.Version), prov.Name) } else { packages[parts[0]].Populate(parts[1:], prov) idxpackages[idx.Name][parts[0]].Populate(parts[1:], prov) } } } } } } } go update_indexes() // Get latest SmartPAN version go func() { res, err := nethttp.Get("https://api.github.com/repos/companieshouse/gopan/releases") if err != nil { log.Error("Error getting latest version: %s", err.Error()) return } defer res.Body.Close() b, err := ioutil.ReadAll(res.Body) if err != nil { log.Error("Error reading stream: %s", err.Error()) return } var r Releases if err = json.Unmarshal(b, &r); err != nil { log.Error("Error unmarshalling JSON: %s", err.Error()) return } log.Info("Current release: %s", config.CurrentRelease) rel := strings.TrimPrefix(r[0].TagName, "v") log.Info("Latest release: %s", rel) config.LatestRelease = rel config.UpdateURL = r[0].URL if config.CurrentRelease < rel { config.CanUpdate = true log.Info("Your version of SmartPAN can be updated.") } }() // Create our Gotcha application var app = gotcha.Create(Asset) app.Config.Listen = config.Bind summary = &Summary{0, 0, 0, 0} app.On(events.BeforeHandler, func(session *http.Session, next func()) { session.Stash["summary"] = summary session.Stash["config"] = config next() }) // Get the router r := app.Router // Create some routes r.Get("/", search) r.Post("/", search) r.Get("/help", help) r.Get("/settings", settings) r.Get("/browse", browse) r.Get("/import", import1) r.Post("/import", import1) r.Get("/import/(?P<jobid>[^/]+)", import2) r.Get("/import/(?P<jobid>[^/]+)/stream", importstream) r.Post("/get-index/(?P<index>(CPAN|BackPAN))/?", getindex) // Serve static content (but really use a CDN) r.Get("/images/(?P<file>.*)", r.Static("assets/images/{{file}}")) r.Get("/css/(?P<file>.*)", r.Static("assets/css/{{file}}")) // JSON endpoints r.Get("/where/(?P<module>[^/]+)/?", where) r.Get("/where/(?P<module>[^/]+)/(?P<version>[^/]+)/?", where) // Put these last so they only match /{repo} if nothing else matches r.Get("/(?P<repo>[^/]+)/?", browse) r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/?", browse) r.Get("/(?P<repo>[^/]+)/modules/02packages\\.details\\.txt(?P<gz>\\.gz)?", pkgindex) r.Get("/(?P<repo>[^/]+)/authors/id/(?P<file>.*\\.tar\\.gz)", download) r.Post("/delete/(?P<repo>[^/]+)/authors/id/(?P<auth1>[^/]+)/(?P<auth2>[^/]+)/(?P<auth3>[^/]+)/(?P<file>.*\\.tar\\.gz)", delete_file) r.Get("/(?P<repo>[^/]+)/(?P<type>[^/]+)/(?P<path>.*)/?", browse) // Start our application app.Start() <-make(chan int) }
func (m *Module) PrintDeps(d int) {
	log.Info(MkIndent(d)+"%s (%s): %s", m.Name, m.Version, m.Cached)
	if m.Deps != nil {
		m.Deps.PrintDeps(d + 1)
	}
}
func getPackages() int {
	newpkg := 0

	var pl func(*html.Node, *gopan.Source, *gopan.Author)
	pl = func(n *html.Node, source *gopan.Source, author *gopan.Author) {
		log.Trace("NODE: %s [%s, %s]", n.DataAtom, n.Type, n.Data)
		if n.Type == html.ElementNode && n.Data == "a" {
			//log.Info("NODE IS ELEMENTNODE")
			for _, attr := range n.Attr {
				// FIXME stuff that isn't .tar.gz?
				if attr.Key == "href" && strings.HasSuffix(attr.Val, ".tar.gz") {
					log.Trace("==> HREF: %s", n.FirstChild.Data)
					pkg := strings.TrimSuffix(n.FirstChild.Data, "/")
					if _, ok := author.Packages[pkg]; !ok {
						author.Packages[pkg] = &gopan.Package{
							Name:   pkg,
							Author: author,
							URL:    author.URL + "/" + pkg,
						}
						newpkg++
						log.Debug("Found package: %s", pkg)
					}
				}
			}
			//log.Info("%s", n.Data)
		}
		for c := n.FirstChild; c != nil; c = c.NextSibling {
			pl(c, source, author)
		}
	}

	log.Info("Building package list")

	for fname := range indexes {
		for _, source := range indexes[fname] {
			log.Debug("Index: %s", source)
			wg.Add(1)
			go func(source *gopan.Source) {
				defer wg.Done()
				for _, author := range source.Authors {
					wg.Add(1)
					go func(author *gopan.Author) {
						defer wg.Done()
						sem <- 1
						log.Trace("=> %s", author)
						url := source.URL + "/" + author.Name[:1] + "/" + author.Name[:2] + "/" + author.Name + "/"
						log.Trace("Getting URL: %s", url)
						res, err := http.Get(url)
						if err != nil {
							log.Error("HTTP GET - %s", err.Error())
							<-sem
							return
						}
						doc, err := html.Parse(res.Body)
						if err != nil {
							log.Error("HTML PARSE - %s", err.Error())
							<-sem
							return
						}
						pl(doc, source, author)
						<-sem
					}(author)
				}
			}(source)
		}
	}

	wg.Wait()

	log.Info("Finished building package list")
	return newpkg
}
func mirrorPan() {
	log.Info("Mirroring *PAN")

	// FIXME inefficient
	_, _, npkg, _ := gopan.CountIndex(indexes)

	mirrored := 0
	var pc = func() int {
		if npkg == 0 {
			return 0
		}
		// Multiply before dividing: integer division would otherwise floor to 0
		return mirrored * 100 / npkg
	}

	for fname := range indexes {
		log.Debug("File: %s", fname)
		for _, source := range indexes[fname] {
			log.Debug("Index: %s", source)
			wg.Add(1)
			go func(source *gopan.Source) {
				defer wg.Done()
				for _, author := range source.Authors {
					log.Debug("=> %s", author)
					wg.Add(1)
					go func(author *gopan.Author) {
						defer wg.Done()
						cachedir := config.CacheDir + "/" + source.Name + "/" + author.Name[:1] + "/" + author.Name[:2] + "/" + author.Name + "/"
						os.MkdirAll(cachedir, 0777)
						for _, pkg := range author.Packages {
							wg.Add(1)
							go func(pkg *gopan.Package) {
								defer wg.Done()

								cache := cachedir + pkg.Name
								log.Trace(" - Caching to: %s", cache)

								if _, err := os.Stat(cache); err == nil {
									log.Debug("%d%% |> %s", pc(), pkg)
									log.Trace(" - Already exists in cache")
									mirrored++
									return
								}

								sem <- 1
								mirrored++
								log.Debug("%d%% => %s", pc(), pkg)

								url := source.URL + "/" + author.Name[:1] + "/" + author.Name[:2] + "/" + author.Name + "/" + pkg.Name
								log.Trace(" - From URL: %s", url)

								// Check the create error before deferring Close
								out, err := os.Create(cache)
								if err != nil {
									log.Error("CREATE - %s", err.Error())
									<-sem
									return
								}
								defer out.Close()

								resp, err := http.Get(url)
								if err != nil {
									log.Error("HTTP GET - %s", err.Error())
									<-sem
									return
								}

								_, err = io.Copy(out, resp.Body)
								if err != nil {
									log.Error("IO COPY - %s", err.Error())
								}

								<-sem
							}(pkg)
						}
					}(author)
				}
			}(source)
		}
	}

	wg.Wait()

	log.Info("Finished mirroring *PAN")
}
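// A quick illustration of why the progress calculation above multiplies before dividing:
// with integer operands, m / n truncates to 0 for any m < n, so "m / n * 100" reports 0%
// until the final package, while "m * 100 / n" gives the intended percentage. The sample
// counts below are arbitrary.
package main

import "fmt"

func main() {
	mirrored, total := 250, 1000
	fmt.Println(mirrored / total * 100) // 0   (truncated before the multiply)
	fmt.Println(mirrored * 100 / total) // 25  (multiply first, then divide)
}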