// compileFile compiles the given file into an LLVM IR module, which is written
// to output, optionally using the Gocc generated lexer.
func compileFile(path string, output io.Writer, goccLexer bool) error {
	// Lexical analysis
	// Syntactic analysis
	// Semantic analysis
	// Intermediate representation generation

	// Create lexer for the input.
	buf, err := ioutilx.ReadFile(path)
	if err != nil {
		return errutil.Err(err)
	}
	if path == "-" {
		path = "<stdin>"
	}
	fmt.Fprintf(os.Stderr, "Compiling %q\n", path)
	var s parser.Scanner
	if goccLexer {
		s = goccscanner.NewFromBytes(buf)
	} else {
		s = handscanner.NewFromBytes(buf)
	}

	// Parse input.
	p := parser.NewParser()
	f, err := p.Parse(s)
	if err != nil {
		if err, ok := err.(*goccerrors.Error); ok {
			// Unwrap Gocc error.
			return parser.NewError(err)
		}
		return errutil.Err(err)
	}
	file := f.(*ast.File)
	input := string(buf)
	src := semerrors.NewSource(path, input)

	// Perform semantic analysis on the parsed file.
	info, err := sem.Check(file)
	if err != nil {
		if err, ok := err.(*errutil.ErrInfo); ok {
			// Unwrap errutil error.
			if err, ok := err.Err.(*semerrors.Error); ok {
				// Unwrap semantic analysis error, and add input source information.
				err.Src = src
				return err
			}
		}
		return errutil.Err(err)
	}

	// Generate LLVM IR module based on the syntax tree of the given file.
	module := irgen.Gen(file, info)
	if _, err := fmt.Fprint(output, module); err != nil {
		return errutil.Err(err)
	}

	return nil
}
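Since this variant of compileFile only needs an io.Writer for the emitted module, a caller can direct the LLVM IR to standard output or to a file on disk. The sketch below is illustrative only; the emitIR wrapper and the file names are assumptions, not part of the source.

// emitIR is a hypothetical helper that writes the LLVM IR of the given uc
// source file to the named .ll file, using the hand-written lexer.
func emitIR(srcPath, irPath string) error {
	out, err := os.Create(irPath)
	if err != nil {
		return err
	}
	defer out.Close()
	// goccLexer == false selects the hand-written scanner.
	return compileFile(srcPath, out, false)
}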
// checkFile performs a static semantic analysis check on the given file.
func checkFile(path string, goccLexer bool) error {
	// Lexical analysis
	// Syntactic analysis (skip function bodies)
	// Top-level declarations; used for forward-declarations.
	// Syntactic analysis (including function bodies)
	// NOTE: "For each method body, we rewind the lexer to the point where the
	// method body began and parse the method body."
	//
	// ref: https://blogs.msdn.microsoft.com/ericlippert/2010/02/04/how-many-passes/
	// Semantic analysis

	// Create lexer for the input.
	buf, err := ioutilx.ReadFile(path)
	if err != nil {
		return errutil.Err(err)
	}
	if path == "-" {
		path = "<stdin>"
	}
	fmt.Fprintf(os.Stderr, "Checking %q\n", path)
	var s parser.Scanner
	if goccLexer {
		s = goccscanner.NewFromBytes(buf)
	} else {
		s = handscanner.NewFromBytes(buf)
	}

	// Parse input.
	p := parser.NewParser()
	f, err := p.Parse(s)
	if err != nil {
		if err, ok := err.(*goccerrors.Error); ok {
			// Unwrap Gocc error.
			return parser.NewError(err)
		}
		return errutil.Err(err)
	}
	file := f.(*ast.File)
	input := string(buf)
	src := semerrors.NewSource(path, input)
	if _, err := sem.Check(file); err != nil {
		if err, ok := err.(*errutil.ErrInfo); ok {
			// Unwrap errutil error.
			if err, ok := err.Err.(*semerrors.Error); ok {
				// Unwrap semantic analysis error, and add input source information.
				err.Src = src
				return err
			}
		}
		return errutil.Err(err)
	}

	return nil
}
// parseFile parses the given file and pretty-prints its abstract syntax tree
// to standard output, optionally using the Gocc generated lexer.
func parseFile(path string, goccLexer bool) error {
	// Create lexer for the input.
	buf, err := ioutilx.ReadFile(path)
	if err != nil {
		return errutil.Err(err)
	}
	if path == "-" {
		fmt.Fprintln(os.Stderr, "Parsing from standard input")
	} else {
		fmt.Fprintf(os.Stderr, "Parsing %q\n", path)
	}
	var s parser.Scanner
	if goccLexer {
		s = goccscanner.NewFromBytes(buf)
	} else {
		s = handscanner.NewFromBytes(buf)
	}

	// Parse input.
	p := parser.NewParser()
	file, err := p.Parse(s)
	if err != nil {
		if err, ok := err.(*errors.Error); ok {
			// Unwrap Gocc error.
			return parser.NewError(err)
		}
		return errutil.Err(err)
	}
	f := file.(*ast.File)
	for _, decl := range f.Decls {
		fmt.Println("=== [ Top-level declaration ] ===")
		fmt.Println()
		fmt.Printf("decl type: %T\n", decl)
		fmt.Println()
		fmt.Println("decl:", decl)
		fmt.Println()
		pretty.Print(decl)
		fmt.Println()
		spew.Print(decl)
		fmt.Println()
		fmt.Println()
	}

	return nil
}
// compileFile compiles the given file into an executable at outputPath,
// linking it against the uc library (uc.ll) through clang, optionally using
// the Gocc generated lexer.
func compileFile(path string, outputPath string, goccLexer bool) error {
	// Lexical analysis
	// Syntactic analysis
	// Semantic analysis
	// Intermediate representation generation

	// Create lexer for the input.
	buf, err := ioutilx.ReadFile(path)
	if err != nil {
		return errutil.Err(err)
	}
	if path == "-" {
		path = "<stdin>"
	}
	fmt.Fprintf(os.Stderr, "Compiling %q\n", path)
	var s parser.Scanner
	if goccLexer {
		s = goccscanner.NewFromBytes(buf)
	} else {
		s = handscanner.NewFromBytes(buf)
	}

	// Parse input.
	p := parser.NewParser()
	f, err := p.Parse(s)
	if err != nil {
		if err, ok := err.(*goccerrors.Error); ok {
			// Unwrap Gocc error.
			return parser.NewError(err)
		}
		return errutil.Err(err)
	}
	file := f.(*ast.File)
	input := string(buf)
	src := semerrors.NewSource(path, input)

	// Perform semantic analysis on the parsed file.
	info, err := sem.Check(file)
	if err != nil {
		if err, ok := err.(*errutil.ErrInfo); ok {
			// Unwrap errutil error.
			if err, ok := err.Err.(*semerrors.Error); ok {
				// Unwrap semantic analysis error, and add input source information.
				err.Src = src
				return err
			}
		}
		return errutil.Err(err)
	}

	// Generate LLVM IR module based on the syntax tree of the given file.
	module := irgen.Gen(file, info)

	// Add path to uc lib.
	lib, err := goutil.SrcDir("github.com/mewmew/uc/testdata")
	if err != nil {
		return errutil.Err(err)
	}
	lib = filepath.Join(lib, "uc.ll")

	// Link and create binary through clang.
	clang := exec.Command("clang", "-o", outputPath, "-x", "ir", lib, "-")
	clang.Stdin = strings.NewReader(module.String())
	clang.Stderr = os.Stderr
	clang.Stdout = os.Stdout
	if err := clang.Run(); err != nil {
		return errutil.Err(err)
	}

	return nil
}
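A command wrapping this variant would typically parse an output path and a lexer-selection flag before dispatching to compileFile for each input file. The sketch below is a minimal example of such wiring; the flag names and defaults are assumptions for illustration and are not taken from the actual command.

// Hypothetical entry point; flag names and defaults are assumptions, not
// taken from the actual command-line tool.
func main() {
	output := flag.String("o", "a.out", "output path of the linked executable")
	goccLexer := flag.Bool("gocc-lexer", false, "use the Gocc generated lexer")
	flag.Parse()
	for _, path := range flag.Args() {
		if err := compileFile(path, *output, *goccLexer); err != nil {
			log.Fatalln(err)
		}
	}
}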