fix panic-based error reporting
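
Replace panic/recover and errorExit-style aborts in evaluation with
returned errors: ast.eval, Value.Eval, Var.Append/AppendVar and the
builtin functions now return an error, and evaluation errors carry
their source location through the new srcpos type and EvalError.
katiMain prints the error and exits with status 2 as GNU make does, so
the kati.AtError hooks are no longer needed and the deferred cleanups
run normally. The few remaining panics in dep.go are marked FIXME with
a TODO to turn them into errors later.

A minimal sketch of the error-reporting pattern this change introduces;
the type and method names match this patch, while the caller shown in
the trailing comment is condensed for illustration:

    package kati

    import "fmt"

    // srcpos identifies where a makefile statement came from.
    type srcpos struct {
            filename string
            lineno   int
    }

    // EvalError is an evaluation error annotated with its source position.
    type EvalError struct {
            Filename string
            Lineno   int
            Err      error
    }

    func (e EvalError) Error() string {
            return fmt.Sprintf("%s:%d: %v", e.Filename, e.Lineno, e.Err)
    }

    // errorf attaches the statement's position to a formatted error.
    func (p srcpos) errorf(f string, args ...interface{}) error {
            return EvalError{Filename: p.filename, Lineno: p.lineno, Err: fmt.Errorf(f, args...)}
    }

    // Callers propagate instead of panicking, e.g. in the evaluator:
    //
    //     if err := stmt.eval(ev); err != nil {
    //             return err // katiMain prints "file:line: message" and exits with 2
    //     }
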
diff --git a/ast.go b/ast.go
index 65d40e1..92f123e 100644
--- a/ast.go
+++ b/ast.go
@@ -16,33 +16,27 @@
 
 import (
 	"bytes"
-	"fmt"
 	"strings"
 )
 
 type ast interface {
-	eval(*Evaluator)
+	eval(*Evaluator) error
 	show()
 }
 
-type astBase struct {
-	filename string
-	lineno   int
-}
-
 type assignAST struct {
-	astBase
+	srcpos
 	lhs Value
 	rhs Value
 	op  string
 	opt string // "override", "export"
 }
 
-func (ast *assignAST) eval(ev *Evaluator) {
-	ev.evalAssign(ast)
+func (ast *assignAST) eval(ev *Evaluator) error {
+	return ev.evalAssign(ast)
 }
 
-func (ast *assignAST) evalRHS(ev *Evaluator, lhs string) Var {
+func (ast *assignAST) evalRHS(ev *Evaluator, lhs string) (Var, error) {
 	origin := "file"
 	if ast.filename == bootstrapMakefileName {
 		origin = "default"
@@ -55,31 +49,33 @@
 	case ":=":
 		switch v := ast.rhs.(type) {
 		case literal:
-			return &simpleVar{value: v.String(), origin: origin}
+			return &simpleVar{value: v.String(), origin: origin}, nil
 		case tmpval:
-			return &simpleVar{value: v.String(), origin: origin}
+			return &simpleVar{value: v.String(), origin: origin}, nil
 		default:
 			var buf bytes.Buffer
-			v.Eval(&buf, ev)
-			return &simpleVar{value: buf.String(), origin: origin}
+			err := v.Eval(&buf, ev)
+			if err != nil {
+				return nil, err
+			}
+			return &simpleVar{value: buf.String(), origin: origin}, nil
 		}
 	case "=":
-		return &recursiveVar{expr: ast.rhs, origin: origin}
+		return &recursiveVar{expr: ast.rhs, origin: origin}, nil
 	case "+=":
 		prev := ev.lookupVarInCurrentScope(lhs)
 		if !prev.IsDefined() {
-			return &recursiveVar{expr: ast.rhs, origin: origin}
+			return &recursiveVar{expr: ast.rhs, origin: origin}, nil
 		}
 		return prev.AppendVar(ev, ast.rhs)
 	case "?=":
 		prev := ev.lookupVarInCurrentScope(lhs)
 		if prev.IsDefined() {
-			return prev
+			return prev, nil
 		}
-		return &recursiveVar{expr: ast.rhs, origin: origin}
-	default:
-		panic(fmt.Sprintf("unknown assign op: %q", ast.op))
+		return &recursiveVar{expr: ast.rhs, origin: origin}, nil
 	}
+	return nil, ast.errorf("unknown assign op: %q", ast.op)
 }
 
 func (ast *assignAST) show() {
@@ -90,14 +86,14 @@
 // Note we cannot be sure what this is, until all variables in |expr|
 // are expanded.
 type maybeRuleAST struct {
-	astBase
+	srcpos
 	expr      Value
 	term      byte // Either ':', '=', or 0
 	afterTerm []byte
 }
 
-func (ast *maybeRuleAST) eval(ev *Evaluator) {
-	ev.evalMaybeRule(ast)
+func (ast *maybeRuleAST) eval(ev *Evaluator) error {
+	return ev.evalMaybeRule(ast)
 }
 
 func (ast *maybeRuleAST) show() {
@@ -105,12 +101,12 @@
 }
 
 type commandAST struct {
-	astBase
+	srcpos
 	cmd string
 }
 
-func (ast *commandAST) eval(ev *Evaluator) {
-	ev.evalCommand(ast)
+func (ast *commandAST) eval(ev *Evaluator) error {
+	return ev.evalCommand(ast)
 }
 
 func (ast *commandAST) show() {
@@ -118,13 +114,13 @@
 }
 
 type includeAST struct {
-	astBase
+	srcpos
 	expr string
 	op   string
 }
 
-func (ast *includeAST) eval(ev *Evaluator) {
-	ev.evalInclude(ast)
+func (ast *includeAST) eval(ev *Evaluator) error {
+	return ev.evalInclude(ast)
 }
 
 func (ast *includeAST) show() {
@@ -132,7 +128,7 @@
 }
 
 type ifAST struct {
-	astBase
+	srcpos
 	op         string
 	lhs        Value
 	rhs        Value // Empty if |op| is ifdef or ifndef.
@@ -140,8 +136,8 @@
 	falseStmts []ast
 }
 
-func (ast *ifAST) eval(ev *Evaluator) {
-	ev.evalIf(ast)
+func (ast *ifAST) eval(ev *Evaluator) error {
+	return ev.evalIf(ast)
 }
 
 func (ast *ifAST) show() {
@@ -150,13 +146,13 @@
 }
 
 type exportAST struct {
-	astBase
+	srcpos
 	expr   []byte
 	export bool
 }
 
-func (ast *exportAST) eval(ev *Evaluator) {
-	ev.evalExport(ast)
+func (ast *exportAST) eval(ev *Evaluator) error {
+	return ev.evalExport(ast)
 }
 
 func (ast *exportAST) show() {
diff --git a/bootstrap.go b/bootstrap.go
index 30ca77d..91cac40 100644
--- a/bootstrap.go
+++ b/bootstrap.go
@@ -22,7 +22,7 @@
 
 const bootstrapMakefileName = "*bootstrap*"
 
-func bootstrapMakefile(targets []string) makefile {
+func bootstrapMakefile(targets []string) (makefile, error) {
 	bootstrap := `
 CC:=cc
 CXX:=g++
@@ -45,12 +45,8 @@
 	bootstrap += fmt.Sprintf("MAKECMDGOALS:=%s\n", strings.Join(targets, " "))
 	cwd, err := filepath.Abs(".")
 	if err != nil {
-		panic(err)
+		return makefile{}, err
 	}
 	bootstrap += fmt.Sprintf("CURDIR:=%s\n", cwd)
-	mk, err := parseMakefileString(bootstrap, bootstrapMakefileName, 0)
-	if err != nil {
-		panic(err)
-	}
-	return mk
+	return parseMakefileString(bootstrap, srcpos{bootstrapMakefileName, 0})
 }
diff --git a/cmd/kati/main.go b/cmd/kati/main.go
index f45dd28..1732e18 100644
--- a/cmd/kati/main.go
+++ b/cmd/kati/main.go
@@ -162,28 +162,33 @@
 func main() {
 	runtime.GOMAXPROCS(runtime.NumCPU())
 	flag.Parse()
+	err := katiMain()
+	if err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		// http://www.gnu.org/software/make/manual/html_node/Running.html
+		os.Exit(2)
+	}
+}
+
+func katiMain() error {
 	if cpuprofile != "" {
 		f, err := os.Create(cpuprofile)
 		if err != nil {
-			panic(err)
+			return err
 		}
 		pprof.StartCPUProfile(f)
 		defer pprof.StopCPUProfile()
-		kati.AtError(pprof.StopCPUProfile)
 	}
 	if heapprofile != "" {
 		defer writeHeapProfile()
-		kati.AtError(writeHeapProfile)
 	}
 	defer kati.DumpStats()
-	kati.AtError(kati.DumpStats)
 	if memstats != "" {
 		ms := memStatsDumper{
 			Template: template.Must(template.New("memstats").Parse(memstats)),
 		}
 		ms.dump()
 		defer ms.dump()
-		kati.AtError(ms.dump)
 	}
 	if traceEventFile != "" {
 		f, err := os.Create(traceEventFile)
@@ -192,7 +197,6 @@
 		}
 		kati.TraceEventStart(f)
 		defer kati.TraceEventStop()
-		kati.AtError(kati.TraceEventStop)
 	}
 
 	if shellDate != "" {
@@ -225,35 +229,41 @@
 
 	g, err := load(req)
 	if err != nil {
-		panic(err)
+		return err
 	}
 	nodes := g.Nodes()
 	vars := g.Vars()
 
 	err = save(g, req.Targets)
 	if err != nil {
-		panic(err)
+		return err
 	}
 
 	if generateNinja {
-		kati.GenerateNinja(g, gomaDir)
-		return
+		err = kati.GenerateNinja(g, gomaDir)
+		if err != nil {
+			return err
+		}
 	}
 
 	if syntaxCheckOnlyFlag {
-		return
+		return nil
 	}
 
 	if queryFlag != "" {
 		kati.Query(os.Stdout, queryFlag, g)
-		return
+		return nil
 	}
 
 	// TODO: Handle target specific variables.
 	ev := kati.NewEvaluator(vars)
 	for name, export := range g.Exports() {
 		if export {
-			os.Setenv(name, ev.EvaluateVar(name))
+			v, err := ev.EvaluateVar(name)
+			if err != nil {
+				return err
+			}
+			os.Setenv(name, v)
 		} else {
 			os.Unsetenv(name)
 		}
@@ -265,9 +275,13 @@
 	if usePara {
 		execOpt.ParaPath = findPara()
 	}
-	ex := kati.NewExecutor(vars, execOpt)
+	ex, err := kati.NewExecutor(vars, execOpt)
+	if err != nil {
+		return err
+	}
 	err = ex.Exec(nodes)
 	if err != nil {
-		panic(err)
+		return err
 	}
+	return nil
 }
diff --git a/dep.go b/dep.go
index 9fb7775..77d4587 100644
--- a/dep.go
+++ b/dep.go
@@ -21,6 +21,7 @@
 	"strings"
 )
 
+// DepNode represents a makefile rule for an output.
 type DepNode struct {
 	Output             string
 	Cmds               []string
@@ -145,7 +146,8 @@
 
 func (db *depBuilder) mergeImplicitRuleVars(outputs []string, vars Vars) Vars {
 	if len(outputs) != 1 {
-		panic(fmt.Sprintf("Implicit rule should have only one output but %q", outputs))
+		// TODO(ukai): should return error?
+		panic(fmt.Sprintf("FIXME: Implicit rule should have only one output but %q", outputs))
 	}
 	logf("merge? %q", db.ruleVars)
 	logf("merge? %q", outputs[0])
@@ -211,7 +213,8 @@
 	}
 	for _, irule := range rules {
 		if len(irule.inputs) != 1 {
-			panic(fmt.Sprintf("unexpected number of input for a suffix rule (%d)", len(irule.inputs)))
+			// TODO(ukai): should return error?
+			panic(fmt.Sprintf("FIXME: unexpected number of input for a suffix rule (%d)", len(irule.inputs)))
 		}
 		if !db.exists(replaceSuffix(output, irule.inputs[0])) {
 			continue
@@ -272,7 +275,11 @@
 				if !present || oldVar.String() == "" {
 					db.vars[name] = tsv
 				} else {
-					v = oldVar.AppendVar(NewEvaluator(db.vars), tsv)
+					var err error
+					v, err = oldVar.AppendVar(NewEvaluator(db.vars), tsv)
+					if err != nil {
+						return nil, err
+					}
 					db.vars[name] = v
 				}
 				tsvs[name] = v
@@ -296,7 +303,7 @@
 	for _, input := range rule.inputs {
 		if len(rule.outputPatterns) > 0 {
 			if len(rule.outputPatterns) > 1 {
-				panic("TODO: multiple output pattern is not supported yet")
+				panic(fmt.Sprintf("FIXME: multiple output pattern is not supported yet"))
 			}
 			input = intern(rule.outputPatterns[0].subst(input, output))
 		} else if rule.isSuffixRule {
@@ -337,6 +344,7 @@
 	n.ActualInputs = actualInputs
 	n.TargetSpecificVars = make(Vars)
 	for k, v := range tsvs {
+		logf("output=%s tsv %s=%s", output, k, v)
 		n.TargetSpecificVars[k] = v
 	}
 	n.Filename = rule.filename
@@ -373,13 +381,13 @@
 	return true
 }
 
-func mergeRules(oldRule, r *rule, output string, isSuffixRule bool) *rule {
+func mergeRules(oldRule, r *rule, output string, isSuffixRule bool) (*rule, error) {
 	if oldRule.isDoubleColon != r.isDoubleColon {
-		errorExit(r.filename, r.lineno, "*** target file %q has both : and :: entries.", output)
+		return nil, r.errorf("*** target file %q has both : and :: entries.", output)
 	}
 	if len(oldRule.cmds) > 0 && len(r.cmds) > 0 && !isSuffixRule && !r.isDoubleColon {
-		warn(r.filename, r.cmdLineno, "overriding commands for target %q", output)
-		warn(oldRule.filename, oldRule.cmdLineno, "ignoring old commands for target %q", output)
+		warn(r.cmdpos(), "overriding commands for target %q", output)
+		warn(oldRule.cmdpos(), "ignoring old commands for target %q", output)
 	}
 
 	mr := &rule{}
@@ -400,13 +408,13 @@
 		mr.orderOnlyInputs = append(oldRule.orderOnlyInputs, mr.orderOnlyInputs...)
 	}
 	mr.outputPatterns = append(mr.outputPatterns, oldRule.outputPatterns...)
-	return mr
+	return mr, nil
 }
 
-func (db *depBuilder) populateExplicitRule(r *rule) {
+func (db *depBuilder) populateExplicitRule(r *rule) error {
 	// It seems rules with no outputs are siliently ignored.
 	if len(r.outputs) == 0 {
-		return
+		return nil
 	}
 	for _, output := range r.outputs {
 		output = trimLeadingCurdir(output)
@@ -414,7 +422,10 @@
 		isSuffixRule := db.populateSuffixRule(r, output)
 
 		if oldRule, present := db.rules[output]; present {
-			mr := mergeRules(oldRule, r, output, isSuffixRule)
+			mr, err := mergeRules(oldRule, r, output, isSuffixRule)
+			if err != nil {
+				return err
+			}
 			db.rules[output] = mr
 		} else {
 			db.rules[output] = r
@@ -423,6 +434,7 @@
 			}
 		}
 	}
+	return nil
 }
 
 func (db *depBuilder) populateImplicitRule(r *rule) {
@@ -440,7 +452,7 @@
 	}
 }
 
-func (db *depBuilder) populateRules(er *evalResult) {
+func (db *depBuilder) populateRules(er *evalResult) error {
 	for _, r := range er.rules {
 		for i, input := range r.inputs {
 			r.inputs[i] = trimLeadingCurdir(input)
@@ -448,8 +460,10 @@
 		for i, orderOnlyInput := range r.orderOnlyInputs {
 			r.orderOnlyInputs[i] = trimLeadingCurdir(orderOnlyInput)
 		}
-		db.populateExplicitRule(r)
-
+		err := db.populateExplicitRule(r)
+		if err != nil {
+			return err
+		}
 		if len(r.outputs) == 0 {
 			db.populateImplicitRule(r)
 		}
@@ -463,6 +477,7 @@
 
 	sort.Stable(byPrefix(db.iprefixRules))
 	sort.Stable(bySuffix(db.isuffixRules))
+	return nil
 }
 
 func reverseImplicitRules(rules []*rule) {
@@ -500,7 +515,7 @@
 	}
 }
 
-func newDepBuilder(er *evalResult, vars Vars) *depBuilder {
+func newDepBuilder(er *evalResult, vars Vars) (*depBuilder, error) {
 	db := &depBuilder{
 		rules:       make(map[string]*rule),
 		ruleVars:    er.ruleVars,
@@ -510,20 +525,23 @@
 		phony:       make(map[string]bool),
 	}
 
-	db.populateRules(er)
+	err := db.populateRules(er)
+	if err != nil {
+		return nil, err
+	}
 	rule, present := db.rules[".PHONY"]
 	if present {
 		for _, input := range rule.inputs {
 			db.phony[input] = true
 		}
 	}
-	return db
+	return db, nil
 }
 
 func (db *depBuilder) Eval(targets []string) ([]*DepNode, error) {
 	if len(targets) == 0 {
 		if db.firstRule == nil {
-			errorNoLocationExit("*** No targets.")
+			return nil, fmt.Errorf("*** No targets.")
 		}
 		targets = append(targets, db.firstRule.outputs[0])
 	}
diff --git a/depgraph.go b/depgraph.go
index 2f243e4..a844aaa 100644
--- a/depgraph.go
+++ b/depgraph.go
@@ -22,6 +22,7 @@
 	"time"
 )
 
+// DepGraph represents rules defined in makefiles.
 type DepGraph struct {
 	nodes       []*DepNode
 	vars        Vars
@@ -30,11 +31,19 @@
 	isCached    bool
 }
 
-func (g *DepGraph) Nodes() []*DepNode        { return g.nodes }
-func (g *DepGraph) Vars() Vars               { return g.vars }
-func (g *DepGraph) Exports() map[string]bool { return g.exports }
-func (g *DepGraph) IsCached() bool           { return g.isCached }
+// Nodes returns all rules.
+func (g *DepGraph) Nodes() []*DepNode { return g.nodes }
 
+// Vars returns all variables.
+func (g *DepGraph) Vars() Vars { return g.vars }
+
+// Exports returns a map of export variables.
+func (g *DepGraph) Exports() map[string]bool { return g.exports }
+
+// IsCached reports whether the DepGraph was loaded from the cache.
+func (g *DepGraph) IsCached() bool { return g.isCached }
+
+// LoadReq is a request to load a makefile.
 type LoadReq struct {
 	Makefile         string
 	Targets          []string
@@ -44,6 +53,7 @@
 	EagerEvalCommand bool
 }
 
+// FromCommandLine creates a LoadReq from the given command line.
 func FromCommandLine(cmdline []string) LoadReq {
 	var vars []string
 	var targets []string
@@ -54,8 +64,12 @@
 		}
 		targets = append(targets, arg)
 	}
+	mk, err := defaultMakefile()
+	if err != nil {
+		logf("default makefile: %v", err)
+	}
 	return LoadReq{
-		Makefile:        defaultMakefile(),
+		Makefile:        mk,
 		Targets:         targets,
 		CommandLineVars: vars,
 	}
@@ -76,20 +90,28 @@
 	return nil
 }
 
+// Load loads a makefile.
 func Load(req LoadReq) (*DepGraph, error) {
 	startTime := time.Now()
+	var err error
 	if req.Makefile == "" {
-		req.Makefile = defaultMakefile()
+		req.Makefile, err = defaultMakefile()
+		if err != nil {
+			return nil, err
+		}
 	}
 
 	if req.UseCache {
-		g := loadCache(req.Makefile, req.Targets)
-		if g != nil {
+		g, err := loadCache(req.Makefile, req.Targets)
+		if err == nil {
 			return g, nil
 		}
 	}
 
-	bmk := bootstrapMakefile(req.Targets)
+	bmk, err := bootstrapMakefile(req.Targets)
+	if err != nil {
+		return nil, err
+	}
 
 	content, err := ioutil.ReadFile(req.Makefile)
 	if err != nil {
@@ -125,7 +147,10 @@
 	logStats("shell func time: %q %d", shellStats.Duration(), shellStats.Count())
 
 	startTime = time.Now()
-	db := newDepBuilder(er, vars)
+	db, err := newDepBuilder(er, vars)
+	if err != nil {
+		return nil, err
+	}
 	logStats("dep build prepare time: %q", time.Since(startTime))
 
 	startTime = time.Now()
@@ -150,7 +175,10 @@
 	}
 	if req.EagerEvalCommand {
 		startTime := time.Now()
-		evalCommands(nodes, vars)
+		err = evalCommands(nodes, vars)
+		if err != nil {
+			return nil, err
+		}
 		logStats("eager eval command time: %q", time.Since(startTime))
 	}
 	if req.UseCache {
@@ -161,14 +189,17 @@
 	return gd, nil
 }
 
+// Loader is the interface that loads a DepGraph.
 type Loader interface {
 	Load(string) (*DepGraph, error)
 }
 
+// Saver is the interface that saves a DepGraph.
 type Saver interface {
 	Save(*DepGraph, string, []string) error
 }
 
+// LoadSaver is the interface that groups Load and Save methods.
 type LoadSaver interface {
 	Loader
 	Saver
diff --git a/doc.go b/doc.go
index 0cea5f9..6c9f26e 100644
--- a/doc.go
+++ b/doc.go
@@ -5,4 +5,4 @@
 */
 package kati
 
-// TODO(ukai): cleanup API. make more symbol unexported.
+// TODO(ukai): add more doc comments.
diff --git a/eval.go b/eval.go
index a72e612..cb2ddfe 100644
--- a/eval.go
+++ b/eval.go
@@ -105,6 +105,46 @@
 	exports     map[string]bool
 }
 
+type srcpos struct {
+	filename string
+	lineno   int
+}
+
+func (p srcpos) String() string {
+	return fmt.Sprintf("%s:%d", p.filename, p.lineno)
+}
+
+// EvalError is an error in kati evaluation.
+type EvalError struct {
+	Filename string
+	Lineno   int
+	Err      error
+}
+
+func (e EvalError) Error() string {
+	return fmt.Sprintf("%s:%d: %v", e.Filename, e.Lineno, e.Err)
+}
+
+func (p srcpos) errorf(f string, args ...interface{}) error {
+	return EvalError{
+		Filename: p.filename,
+		Lineno:   p.lineno,
+		Err:      fmt.Errorf(f, args...),
+	}
+}
+
+func (p srcpos) error(err error) error {
+	if _, ok := err.(EvalError); ok {
+		return err
+	}
+	return EvalError{
+		Filename: p.filename,
+		Lineno:   p.lineno,
+		Err:      err,
+	}
+}
+
+// Evaluator manages makefile evaluation.
 type Evaluator struct {
 	paramVars    []tmpval // $1 => paramVars[1]
 	outVars      Vars
@@ -118,10 +158,10 @@
 	cache        *accessCache
 	exports      map[string]bool
 
-	filename string
-	lineno   int
+	srcpos
 }
 
+// NewEvaluator creates a new Evaluator.
 func NewEvaluator(vars map[string]Var) *Evaluator {
 	return &Evaluator{
 		outVars:     make(Vars),
@@ -131,10 +171,13 @@
 	}
 }
 
-func (ev *Evaluator) args(buf *buffer, args ...Value) [][]byte {
+func (ev *Evaluator) args(buf *buffer, args ...Value) ([][]byte, error) {
 	pos := make([]int, 0, len(args))
 	for _, arg := range args {
-		arg.Eval(buf, ev)
+		err := arg.Eval(buf, ev)
+		if err != nil {
+			return nil, err
+		}
 		pos = append(pos, buf.Len())
 	}
 	v := buf.Bytes()
@@ -144,24 +187,27 @@
 		buf.args = append(buf.args, v[s:p])
 		s = p
 	}
-	return buf.args
+	return buf.args, nil
 }
 
-func (ev *Evaluator) evalAssign(ast *assignAST) {
+func (ev *Evaluator) evalAssign(ast *assignAST) error {
 	ev.lastRule = nil
-	lhs, rhs := ev.evalAssignAST(ast)
+	lhs, rhs, err := ev.evalAssignAST(ast)
+	if err != nil {
+		return err
+	}
 	if LogFlag {
 		logf("ASSIGN: %s=%q (flavor:%q)", lhs, rhs, rhs.Flavor())
 	}
 	if lhs == "" {
-		errorExit(ast.filename, ast.lineno, "*** empty variable name.")
+		return ast.errorf("*** empty variable name.")
 	}
 	ev.outVars.Assign(lhs, rhs)
+	return nil
 }
 
-func (ev *Evaluator) evalAssignAST(ast *assignAST) (string, Var) {
-	ev.filename = ast.filename
-	ev.lineno = ast.lineno
+func (ev *Evaluator) evalAssignAST(ast *assignAST) (string, Var, error) {
+	ev.srcpos = ast.srcpos
 
 	var lhs string
 	switch v := ast.lhs.(type) {
@@ -171,37 +217,49 @@
 		lhs = string(v)
 	default:
 		buf := newBuf()
-		v.Eval(buf, ev)
+		err := v.Eval(buf, ev)
+		if err != nil {
+			return "", nil, err
+		}
 		lhs = string(trimSpaceBytes(buf.Bytes()))
 		freeBuf(buf)
 	}
-	rhs := ast.evalRHS(ev, lhs)
-	return lhs, rhs
+	rhs, err := ast.evalRHS(ev, lhs)
+	if err != nil {
+		return "", nil, err
+	}
+	return lhs, rhs, nil
 }
 
-func (ev *Evaluator) setTargetSpecificVar(assign *assignAST, output string) {
+func (ev *Evaluator) setTargetSpecificVar(assign *assignAST, output string) error {
 	vars, present := ev.outRuleVars[output]
 	if !present {
 		vars = make(Vars)
 		ev.outRuleVars[output] = vars
 	}
 	ev.currentScope = vars
-	lhs, rhs := ev.evalAssignAST(assign)
+	lhs, rhs, err := ev.evalAssignAST(assign)
+	if err != nil {
+		return err
+	}
 	if LogFlag {
-		logf("rule outputs:%q assign:%q=%q (flavor:%q)", output, lhs, rhs, rhs.Flavor())
+		logf("rule outputs:%q assign:%q%s%q (flavor:%q)", output, lhs, assign.op, rhs, rhs.Flavor())
 	}
 	vars.Assign(lhs, &targetSpecificVar{v: rhs, op: assign.op})
 	ev.currentScope = nil
+	return nil
 }
 
-func (ev *Evaluator) evalMaybeRule(ast *maybeRuleAST) {
+func (ev *Evaluator) evalMaybeRule(ast *maybeRuleAST) error {
 	ev.lastRule = nil
-	ev.filename = ast.filename
-	ev.lineno = ast.lineno
+	ev.srcpos = ast.srcpos
 
 	lexpr := ast.expr
 	buf := newBuf()
-	lexpr.Eval(buf, ev)
+	err := lexpr.Eval(buf, ev)
+	if err != nil {
+		return err
+	}
 	line := buf.Bytes()
 	if ast.term == '=' {
 		line = append(line, ast.afterTerm...)
@@ -213,16 +271,13 @@
 	// See semicolon.mk.
 	if len(bytes.TrimRight(line, " \t\n;")) == 0 {
 		freeBuf(buf)
-		return
+		return nil
 	}
 
-	r := &rule{
-		filename: ast.filename,
-		lineno:   ast.lineno,
-	}
+	r := &rule{srcpos: ast.srcpos}
 	assign, err := r.parse(line)
 	if err != nil {
-		errorExit(ast.filename, ast.lineno, "%v", err)
+		return ast.error(err)
 	}
 	freeBuf(buf)
 	if LogFlag {
@@ -236,15 +291,18 @@
 		if ast.term == ';' {
 			nexpr, _, err := parseExpr(ast.afterTerm, nil, false)
 			if err != nil {
-				panic(fmt.Errorf("parse %s:%d %v", ev.filename, ev.lineno, err))
+				return ast.errorf("parse error: %q: %v", string(ast.afterTerm), err)
 			}
 			lexpr = expr{lexpr, nexpr}
 
 			buf = newBuf()
-			lexpr.Eval(buf, ev)
+			err = lexpr.Eval(buf, ev)
+			if err != nil {
+				return err
+			}
 			assign, err = r.parse(buf.Bytes())
 			if err != nil {
-				errorExit(ast.filename, ast.lineno, "%v", err)
+				return ast.error(err)
 			}
 			freeBuf(buf)
 		}
@@ -254,7 +312,7 @@
 		for _, output := range r.outputPatterns {
 			ev.setTargetSpecificVar(assign, output.String())
 		}
-		return
+		return nil
 	}
 
 	if ast.term == ';' {
@@ -265,37 +323,42 @@
 	}
 	ev.lastRule = r
 	ev.outRules = append(ev.outRules, r)
+	return nil
 }
 
-func (ev *Evaluator) evalCommand(ast *commandAST) {
-	ev.filename = ast.filename
-	ev.lineno = ast.lineno
+func (ev *Evaluator) evalCommand(ast *commandAST) error {
+	ev.srcpos = ast.srcpos
 	if ev.lastRule == nil {
 		// This could still be an assignment statement. See
 		// assign_after_tab.mk.
 		if strings.IndexByte(ast.cmd, '=') >= 0 {
 			line := trimLeftSpace(ast.cmd)
-			mk, err := parseMakefileString(line, ast.filename, ast.lineno)
+			mk, err := parseMakefileString(line, ast.srcpos)
 			if err != nil {
-				panic(err)
+				return ast.errorf("parse failed: %q: %v", line, err)
 			}
 			if len(mk.stmts) == 1 && mk.stmts[0].(*assignAST) != nil {
-				ev.eval(mk.stmts[0])
+				err = ev.eval(mk.stmts[0])
+				if err != nil {
+					return err
+				}
 			}
-			return
+			return nil
 		}
 		// Or, a comment is OK.
 		if strings.TrimSpace(ast.cmd)[0] == '#' {
-			return
+			return nil
 		}
-		errorExit(ast.filename, ast.lineno, "*** commands commence before first target.")
+		return ast.errorf("*** commands commence before first target.")
 	}
 	ev.lastRule.cmds = append(ev.lastRule.cmds, ast.cmd)
 	if ev.lastRule.cmdLineno == 0 {
 		ev.lastRule.cmdLineno = ast.lineno
 	}
+	return nil
 }
 
+// LookupVar looks up the named variable.
 func (ev *Evaluator) LookupVar(name string) Var {
 	if ev.currentScope != nil {
 		v := ev.currentScope.Lookup(name)
@@ -325,10 +388,13 @@
 // EvaluateVar evaluates variable named name.
 // Only for a few special uses such as getting SHELL and handling
 // export/unexport.
-func (ev *Evaluator) EvaluateVar(name string) string {
+func (ev *Evaluator) EvaluateVar(name string) (string, error) {
 	var buf bytes.Buffer
-	ev.LookupVar(name).Eval(&buf, ev)
-	return buf.String()
+	err := ev.LookupVar(name).Eval(&buf, ev)
+	if err != nil {
+		return "", err
+	}
+	return buf.String(), nil
 }
 
 func (ev *Evaluator) evalIncludeFile(fname string, mk makefile) error {
@@ -336,28 +402,37 @@
 	defer func() {
 		traceEvent.end(te)
 	}()
+	var err error
 	makefileList := ev.outVars.Lookup("MAKEFILE_LIST")
-	makefileList = makefileList.Append(ev, mk.filename)
+	makefileList, err = makefileList.Append(ev, mk.filename)
+	if err != nil {
+		return err
+	}
 	ev.outVars.Assign("MAKEFILE_LIST", makefileList)
 
 	for _, stmt := range mk.stmts {
-		ev.eval(stmt)
+		err = ev.eval(stmt)
+		if err != nil {
+			return err
+		}
 	}
 	return nil
 }
 
-func (ev *Evaluator) evalInclude(ast *includeAST) {
+func (ev *Evaluator) evalInclude(ast *includeAST) error {
 	ev.lastRule = nil
-	ev.filename = ast.filename
-	ev.lineno = ast.lineno
+	ev.srcpos = ast.srcpos
 
-	logf("%s:%d include %q", ev.filename, ev.lineno, ast.expr)
+	logf("%s include %q", ev.srcpos, ast.expr)
 	v, _, err := parseExpr([]byte(ast.expr), nil, false)
 	if err != nil {
-		panic(err)
+		return ast.errorf("parse failed: %q: %v", ast.expr, err)
 	}
 	var buf bytes.Buffer
-	v.Eval(&buf, ev)
+	err = v.Eval(&buf, ev)
+	if err != nil {
+		return ast.errorf("%v", err)
+	}
 	pats := splitSpaces(buf.String())
 	buf.Reset()
 
@@ -366,7 +441,7 @@
 		if strings.Contains(pat, "*") || strings.Contains(pat, "?") {
 			matched, err := filepath.Glob(pat)
 			if err != nil {
-				panic(err)
+				return ast.errorf("glob error: %s: %v", pat, err)
 			}
 			files = append(files, matched...)
 		} else {
@@ -381,36 +456,42 @@
 		mk, hash, err := makefileCache.parse(fn)
 		if os.IsNotExist(err) {
 			if ast.op == "include" {
-				errorExit(ev.filename, ev.lineno, "%v\nNOTE: kati does not support generating missing makefiles", err)
-			} else {
-				msg := ev.cache.update(fn, hash, fileNotExists)
-				if msg != "" {
-					warn(ev.filename, ev.lineno, "%s", msg)
-				}
-				continue
+				return ev.errorf("%v\nNOTE: kati does not support generating missing makefiles", err)
 			}
+			msg := ev.cache.update(fn, hash, fileNotExists)
+			if msg != "" {
+				warn(ev.srcpos, "%s", msg)
+			}
+			continue
 		}
 		msg := ev.cache.update(fn, hash, fileExists)
 		if msg != "" {
-			warn(ev.filename, ev.lineno, "%s", msg)
+			warn(ev.srcpos, "%s", msg)
 		}
 		err = ev.evalIncludeFile(fn, mk)
 		if err != nil {
-			panic(err)
+			return err
 		}
 	}
+	return nil
 }
 
-func (ev *Evaluator) evalIf(iast *ifAST) {
+func (ev *Evaluator) evalIf(iast *ifAST) error {
 	var isTrue bool
 	switch iast.op {
 	case "ifdef", "ifndef":
 		expr := iast.lhs
 		buf := newBuf()
-		expr.Eval(buf, ev)
+		err := expr.Eval(buf, ev)
+		if err != nil {
+			return iast.errorf("%v\n expr:%s", err, expr)
+		}
 		v := ev.LookupVar(buf.String())
 		buf.Reset()
-		v.Eval(buf, ev)
+		err = v.Eval(buf, ev)
+		if err != nil {
+			return iast.errorf("%v\n expr:%s=>%s", err, expr, v)
+		}
 		value := buf.String()
 		val := buf.Len()
 		freeBuf(buf)
@@ -422,7 +503,10 @@
 		lexpr := iast.lhs
 		rexpr := iast.rhs
 		buf := newBuf()
-		params := ev.args(buf, lexpr, rexpr)
+		params, err := ev.args(buf, lexpr, rexpr)
+		if err != nil {
+			return iast.errorf("%v\n (%s,%s)", err, lexpr, rexpr)
+		}
 		lhs := string(params[0])
 		rhs := string(params[1])
 		freeBuf(buf)
@@ -431,7 +515,7 @@
 			logf("%s lhs=%q %q rhs=%q %q => %t", iast.op, iast.lhs, lhs, iast.rhs, rhs, isTrue)
 		}
 	default:
-		panic(fmt.Sprintf("unknown if statement: %q", iast.op))
+		return iast.errorf("unknown if statement: %q", iast.op)
 	}
 
 	var stmts []ast
@@ -441,28 +525,35 @@
 		stmts = iast.falseStmts
 	}
 	for _, stmt := range stmts {
-		ev.eval(stmt)
+		err := ev.eval(stmt)
+		if err != nil {
+			return err
+		}
 	}
+	return nil
 }
 
-func (ev *Evaluator) evalExport(ast *exportAST) {
+func (ev *Evaluator) evalExport(ast *exportAST) error {
 	ev.lastRule = nil
-	ev.filename = ast.filename
-	ev.lineno = ast.lineno
+	ev.srcpos = ast.srcpos
 
 	v, _, err := parseExpr(ast.expr, nil, false)
 	if err != nil {
-		panic(err)
+		return ast.errorf("failed to parse: %q: %v", string(ast.expr), err)
 	}
 	var buf bytes.Buffer
-	v.Eval(&buf, ev)
+	err = v.Eval(&buf, ev)
+	if err != nil {
+		return ast.errorf("%v\n expr:%s", err, v)
+	}
 	for _, n := range splitSpacesBytes(buf.Bytes()) {
 		ev.exports[string(n)] = ast.export
 	}
+	return nil
 }
 
-func (ev *Evaluator) eval(stmt ast) {
-	stmt.eval(ev)
+func (ev *Evaluator) eval(stmt ast) error {
+	return stmt.eval(ev)
 }
 
 func eval(mk makefile, vars Vars, useCache bool) (er *evalResult, err error) {
@@ -470,21 +561,22 @@
 	if useCache {
 		ev.cache = newAccessCache()
 	}
-	defer func() {
-		if r := recover(); r != nil {
-			err = fmt.Errorf("panic in eval %s: %v", mk.filename, r)
-		}
-	}()
 
 	makefileList := vars.Lookup("MAKEFILE_LIST")
 	if !makefileList.IsDefined() {
 		makefileList = &simpleVar{value: "", origin: "file"}
 	}
-	makefileList = makefileList.Append(ev, mk.filename)
+	makefileList, err = makefileList.Append(ev, mk.filename)
+	if err != nil {
+		return nil, err
+	}
 	ev.outVars.Assign("MAKEFILE_LIST", makefileList)
 
 	for _, stmt := range mk.stmts {
-		ev.eval(stmt)
+		err = ev.eval(stmt)
+		if err != nil {
+			return nil, err
+		}
 	}
 
 	return &evalResult{
diff --git a/exec.go b/exec.go
index 160c0b7..2e6c346 100644
--- a/exec.go
+++ b/exec.go
@@ -24,6 +24,7 @@
 	"time"
 )
 
+// Executor manages execution of makefile rules.
 type Executor struct {
 	rules         map[string]*rule
 	implicitRules []*rule
@@ -53,38 +54,41 @@
 
 func (v autoVar) Flavor() string  { return "undefined" }
 func (v autoVar) Origin() string  { return "automatic" }
-func (v autoVar) IsDefined() bool { panic("not implemented") }
-func (v autoVar) String() string  { panic("not implemented") }
-func (v autoVar) Append(*Evaluator, string) Var {
-	panic("must not be called")
+func (v autoVar) IsDefined() bool { return true }
+func (v autoVar) Append(*Evaluator, string) (Var, error) {
+	return nil, fmt.Errorf("cannot append to autovar")
 }
-func (v autoVar) AppendVar(*Evaluator, Value) Var {
-	panic("must not be called")
+func (v autoVar) AppendVar(*Evaluator, Value) (Var, error) {
+	return nil, fmt.Errorf("cannot append to autovar")
 }
 func (v autoVar) serialize() serializableVar {
-	panic(fmt.Sprintf("cannot serialize auto var: %q", v))
+	return serializableVar{Type: ""}
 }
-func (v autoVar) dump(w io.Writer) {
-	panic(fmt.Sprintf("cannot dump auto var: %q", v))
+func (v autoVar) dump(d *dumpbuf) {
+	d.err = fmt.Errorf("cannot dump auto var: %v", v)
 }
 
 type autoAtVar struct{ autoVar }
 
-func (v autoAtVar) Eval(w io.Writer, ev *Evaluator) {
+func (v autoAtVar) Eval(w io.Writer, ev *Evaluator) error {
 	fmt.Fprint(w, v.ex.currentOutput)
+	return nil
 }
+func (v autoAtVar) String() string { return "$@" }
 
 type autoLessVar struct{ autoVar }
 
-func (v autoLessVar) Eval(w io.Writer, ev *Evaluator) {
+func (v autoLessVar) Eval(w io.Writer, ev *Evaluator) error {
 	if len(v.ex.currentInputs) > 0 {
 		fmt.Fprint(w, v.ex.currentInputs[0])
 	}
+	return nil
 }
+func (v autoLessVar) String() string { return "$<" }
 
 type autoHatVar struct{ autoVar }
 
-func (v autoHatVar) Eval(w io.Writer, ev *Evaluator) {
+func (v autoHatVar) Eval(w io.Writer, ev *Evaluator) error {
 	var uniqueInputs []string
 	seen := make(map[string]bool)
 	for _, input := range v.ex.currentInputs {
@@ -94,51 +98,69 @@
 		}
 	}
 	fmt.Fprint(w, strings.Join(uniqueInputs, " "))
+	return nil
 }
+func (v autoHatVar) String() string { return "$^" }
 
 type autoPlusVar struct{ autoVar }
 
-func (v autoPlusVar) Eval(w io.Writer, ev *Evaluator) {
+func (v autoPlusVar) Eval(w io.Writer, ev *Evaluator) error {
 	fmt.Fprint(w, strings.Join(v.ex.currentInputs, " "))
+	return nil
 }
+func (v autoPlusVar) String() string { return "$+" }
 
 type autoStarVar struct{ autoVar }
 
-func (v autoStarVar) Eval(w io.Writer, ev *Evaluator) {
+func (v autoStarVar) Eval(w io.Writer, ev *Evaluator) error {
 	// TODO: Use currentStem. See auto_stem_var.mk
 	fmt.Fprint(w, stripExt(v.ex.currentOutput))
+	return nil
 }
+func (v autoStarVar) String() string { return "$*" }
 
 type autoSuffixDVar struct {
 	autoVar
 	v Var
 }
 
-func (v autoSuffixDVar) Eval(w io.Writer, ev *Evaluator) {
+func (v autoSuffixDVar) Eval(w io.Writer, ev *Evaluator) error {
 	var buf bytes.Buffer
-	v.v.Eval(&buf, ev)
+	err := v.v.Eval(&buf, ev)
+	if err != nil {
+		return err
+	}
 	ws := newWordScanner(buf.Bytes())
 	sw := ssvWriter{w: w}
 	for ws.Scan() {
 		sw.WriteString(filepath.Dir(string(ws.Bytes())))
 	}
+	return nil
 }
 
+func (v autoSuffixDVar) String() string { return v.v.String() + "D" }
+
 type autoSuffixFVar struct {
 	autoVar
 	v Var
 }
 
-func (v autoSuffixFVar) Eval(w io.Writer, ev *Evaluator) {
+func (v autoSuffixFVar) Eval(w io.Writer, ev *Evaluator) error {
 	var buf bytes.Buffer
-	v.v.Eval(&buf, ev)
+	err := v.v.Eval(&buf, ev)
+	if err != nil {
+		return err
+	}
 	ws := newWordScanner(buf.Bytes())
 	sw := ssvWriter{w: w}
 	for ws.Scan() {
 		sw.WriteString(filepath.Base(string(ws.Bytes())))
 	}
+	return nil
 }
 
+func (v autoSuffixFVar) String() string { return v.v.String() + "F" }
+
 func (ex *Executor) makeJobs(n *DepNode, neededBy *job) error {
 	output := n.Output
 	if neededBy != nil {
@@ -219,29 +241,38 @@
 	}
 }
 
+// ExecutorOpt holds options for an Executor.
 type ExecutorOpt struct {
 	NumJobs  int
 	ParaPath string
 }
 
-func NewExecutor(vars Vars, opt *ExecutorOpt) *Executor {
+// NewExecutor creates a new Executor.
+func NewExecutor(vars Vars, opt *ExecutorOpt) (*Executor, error) {
 	if opt == nil {
 		opt = &ExecutorOpt{NumJobs: 1}
 	}
 	if opt.NumJobs < 1 {
 		opt.NumJobs = 1
 	}
+	wm, err := newWorkerManager(opt.NumJobs, opt.ParaPath)
+	if err != nil {
+		return nil, err
+	}
 	ex := &Executor{
 		rules:       make(map[string]*rule),
 		suffixRules: make(map[string][]*rule),
 		done:        make(map[string]*job),
 		vars:        vars,
-		wm:          newWorkerManager(opt.NumJobs, opt.ParaPath),
+		wm:          wm,
 	}
 	// TODO: We should move this to somewhere around evalCmd so that
 	// we can handle SHELL in target specific variables.
 	ev := NewEvaluator(ex.vars)
-	ex.shell = ev.EvaluateVar("SHELL")
+	ex.shell, err = ev.EvaluateVar("SHELL")
+	if err != nil {
+		ex.shell = "/bin/sh"
+	}
 	for k, v := range map[string]Var{
 		"@": autoAtVar{autoVar: autoVar{ex: ex}},
 		"<": autoLessVar{autoVar: autoVar{ex: ex}},
@@ -253,23 +284,24 @@
 		ex.vars[k+"D"] = autoSuffixDVar{v: v}
 		ex.vars[k+"F"] = autoSuffixFVar{v: v}
 	}
-	return ex
+	return ex, nil
 }
 
+// Exec runs the commands needed to build the given root nodes.
 func (ex *Executor) Exec(roots []*DepNode) error {
 	startTime := time.Now()
 	for _, root := range roots {
 		ex.makeJobs(root, nil)
 	}
-	ex.wm.Wait()
+	err := ex.wm.Wait()
 	logStats("exec time: %q", time.Since(startTime))
-	return nil
+	return err
 }
 
-func (ex *Executor) createRunners(n *DepNode, avoidIO bool) ([]runner, bool) {
+func (ex *Executor) createRunners(n *DepNode, avoidIO bool) ([]runner, bool, error) {
 	var runners []runner
 	if len(n.Cmds) == 0 {
-		return runners, false
+		return runners, false, nil
 	}
 
 	var restores []func()
@@ -287,6 +319,7 @@
 	for k, v := range n.TargetSpecificVars {
 		restores = append(restores, ex.vars.save(k))
 		ex.vars[k] = v
+		logf("tsv: %s=%s", k, v)
 	}
 
 	ev := NewEvaluator(ex.vars)
@@ -300,20 +333,30 @@
 		shell:  ex.shell,
 	}
 	for _, cmd := range n.Cmds {
-		for _, r := range evalCmd(ev, r, cmd) {
+		rr, err := evalCmd(ev, r, cmd)
+		if err != nil {
+			return nil, false, err
+		}
+		for _, r := range rr {
 			if len(r.cmd) != 0 {
 				runners = append(runners, r)
 			}
 		}
 	}
-	return runners, ev.hasIO
+	return runners, ev.hasIO, nil
 }
 
-func evalCommands(nodes []*DepNode, vars Vars) {
+func evalCommands(nodes []*DepNode, vars Vars) error {
 	ioCnt := 0
-	ex := NewExecutor(vars, nil)
+	ex, err := NewExecutor(vars, nil)
+	if err != nil {
+		return err
+	}
 	for i, n := range nodes {
-		runners, hasIO := ex.createRunners(n, true)
+		runners, hasIO, err := ex.createRunners(n, true)
+		if err != nil {
+			return err
+		}
 		if hasIO {
 			ioCnt++
 			if ioCnt%100 == 0 {
@@ -338,4 +381,5 @@
 	}
 
 	logStats("%d/%d rules have IO", ioCnt, len(nodes))
+	return nil
 }
diff --git a/expr.go b/expr.go
index df81be3..a248325 100644
--- a/expr.go
+++ b/expr.go
@@ -53,42 +53,45 @@
 	bufFree.Put(buf)
 }
 
+// Value is an interface for a makefile value.
 type Value interface {
 	String() string
-	Eval(w io.Writer, ev *Evaluator)
+	Eval(w io.Writer, ev *Evaluator) error
 	serialize() serializableVar
-	dump(w io.Writer)
+	dump(d *dumpbuf)
 }
 
 // literal is literal value.
 type literal string
 
 func (s literal) String() string { return string(s) }
-func (s literal) Eval(w io.Writer, ev *Evaluator) {
+func (s literal) Eval(w io.Writer, ev *Evaluator) error {
 	io.WriteString(w, string(s))
+	return nil
 }
 func (s literal) serialize() serializableVar {
 	return serializableVar{Type: "literal", V: string(s)}
 }
-func (s literal) dump(w io.Writer) {
-	dumpByte(w, valueTypeLiteral)
-	dumpBytes(w, []byte(s))
+func (s literal) dump(d *dumpbuf) {
+	d.Byte(valueTypeLiteral)
+	d.Bytes([]byte(s))
 }
 
 // tmpval is temporary value.
 type tmpval []byte
 
 func (t tmpval) String() string { return string(t) }
-func (t tmpval) Eval(w io.Writer, ev *Evaluator) {
+func (t tmpval) Eval(w io.Writer, ev *Evaluator) error {
 	w.Write(t)
+	return nil
 }
 func (t tmpval) Value() []byte { return []byte(t) }
 func (t tmpval) serialize() serializableVar {
 	return serializableVar{Type: "tmpval", V: string(t)}
 }
-func (t tmpval) dump(w io.Writer) {
-	dumpByte(w, valueTypeTmpval)
-	dumpBytes(w, t)
+func (t tmpval) dump(d *dumpbuf) {
+	d.Byte(valueTypeTmpval)
+	d.Bytes(t)
 }
 
 // expr is a list of values.
@@ -102,10 +105,14 @@
 	return strings.Join(s, "")
 }
 
-func (e expr) Eval(w io.Writer, ev *Evaluator) {
+func (e expr) Eval(w io.Writer, ev *Evaluator) error {
 	for _, v := range e {
-		v.Eval(w, ev)
+		err := v.Eval(w, ev)
+		if err != nil {
+			return err
+		}
 	}
+	return nil
 }
 
 func (e expr) serialize() serializableVar {
@@ -115,11 +122,11 @@
 	}
 	return r
 }
-func (e expr) dump(w io.Writer) {
-	dumpByte(w, valueTypeExpr)
-	dumpInt(w, len(e))
+func (e expr) dump(d *dumpbuf) {
+	d.Byte(valueTypeExpr)
+	d.Int(len(e))
 	for _, v := range e {
-		v.dump(w)
+		v.dump(d)
 	}
 }
 
@@ -144,14 +151,21 @@
 	return fmt.Sprintf("${%s}", varname)
 }
 
-func (v *varref) Eval(w io.Writer, ev *Evaluator) {
+func (v *varref) Eval(w io.Writer, ev *Evaluator) error {
 	te := traceEvent.begin("var", v, traceEventMain)
 	buf := newBuf()
-	v.varname.Eval(buf, ev)
+	err := v.varname.Eval(buf, ev)
+	if err != nil {
+		return err
+	}
 	vv := ev.LookupVar(buf.String())
 	freeBuf(buf)
-	vv.Eval(w, ev)
+	err = vv.Eval(w, ev)
+	if err != nil {
+		return err
+	}
 	traceEvent.end(te)
+	return nil
 }
 
 func (v *varref) serialize() serializableVar {
@@ -160,9 +174,9 @@
 		Children: []serializableVar{v.varname.serialize()},
 	}
 }
-func (v *varref) dump(w io.Writer) {
-	dumpByte(w, valueTypeVarref)
-	v.varname.dump(w)
+func (v *varref) dump(d *dumpbuf) {
+	d.Byte(valueTypeVarref)
+	v.varname.dump(d)
 }
 
 // paramref is parameter reference e.g. $1.
@@ -172,25 +186,32 @@
 	return fmt.Sprintf("$%d", int(p))
 }
 
-func (p paramref) Eval(w io.Writer, ev *Evaluator) {
+func (p paramref) Eval(w io.Writer, ev *Evaluator) error {
 	te := traceEvent.begin("param", p, traceEventMain)
 	n := int(p)
 	if n < len(ev.paramVars) {
-		ev.paramVars[n].Eval(w, ev)
+		err := ev.paramVars[n].Eval(w, ev)
+		if err != nil {
+			return err
+		}
 	} else {
 		vv := ev.LookupVar(fmt.Sprintf("%d", n))
-		vv.Eval(w, ev)
+		err := vv.Eval(w, ev)
+		if err != nil {
+			return err
+		}
 	}
 	traceEvent.end(te)
+	return nil
 }
 
 func (p paramref) serialize() serializableVar {
 	return serializableVar{Type: "paramref", V: strconv.Itoa(int(p))}
 }
 
-func (p paramref) dump(w io.Writer) {
-	dumpByte(w, valueTypeParamref)
-	dumpInt(w, int(p))
+func (p paramref) dump(d *dumpbuf) {
+	d.Byte(valueTypeParamref)
+	d.Int(int(p))
 }
 
 // varsubst is variable substitutaion. e.g. ${var:pat=subst}.
@@ -204,16 +225,22 @@
 	return fmt.Sprintf("${%s:%s=%s}", v.varname, v.pat, v.subst)
 }
 
-func (v varsubst) Eval(w io.Writer, ev *Evaluator) {
+func (v varsubst) Eval(w io.Writer, ev *Evaluator) error {
 	te := traceEvent.begin("varsubst", v, traceEventMain)
 	buf := newBuf()
-	params := ev.args(buf, v.varname, v.pat, v.subst)
+	params, err := ev.args(buf, v.varname, v.pat, v.subst)
+	if err != nil {
+		return err
+	}
 	vname := string(params[0])
 	pat := string(params[1])
 	subst := string(params[2])
 	buf.Reset()
 	vv := ev.LookupVar(vname)
-	vv.Eval(buf, ev)
+	err = vv.Eval(buf, ev)
+	if err != nil {
+		return err
+	}
 	vals := splitSpaces(buf.String())
 	freeBuf(buf)
 	space := false
@@ -225,6 +252,7 @@
 		space = true
 	}
 	traceEvent.end(te)
+	return nil
 }
 
 func (v varsubst) serialize() serializableVar {
@@ -238,11 +266,11 @@
 	}
 }
 
-func (v varsubst) dump(w io.Writer) {
-	dumpByte(w, valueTypeVarsubst)
-	v.varname.dump(w)
-	v.pat.dump(w)
-	v.subst.dump(w)
+func (v varsubst) dump(d *dumpbuf) {
+	d.Byte(valueTypeVarsubst)
+	v.varname.dump(d)
+	v.pat.dump(d)
+	v.subst.dump(d)
 }
 
 func str(buf []byte, alloc bool) Value {
@@ -447,7 +475,7 @@
 				subst:   subst,
 			}, i + 1, nil
 		default:
-			panic(fmt.Sprintf("unexpected char"))
+			return nil, 0, fmt.Errorf("unexpected char %c at %d in %q", in[i], i, string(in))
 		}
 	}
 }
@@ -606,21 +634,36 @@
 	str string
 }
 
-func (f funcstats) Eval(w io.Writer, ev *Evaluator) {
+func (f funcstats) Eval(w io.Writer, ev *Evaluator) error {
 	te := traceEvent.begin("func", literal(f.str), traceEventMain)
-	f.Value.Eval(w, ev)
+	err := f.Value.Eval(w, ev)
+	if err != nil {
+		return err
+	}
 	// TODO(ukai): per functype?
 	traceEvent.end(te)
+	return nil
 }
 
-type matchVarref struct{}
+type matcherValue struct{}
 
-func (m matchVarref) String() string                  { return "$(match-any)" }
-func (m matchVarref) Eval(w io.Writer, ev *Evaluator) { panic("not implemented") }
-func (m matchVarref) serialize() serializableVar      { panic("not implemented") }
-func (m matchVarref) dump(w io.Writer)                { panic("not implemented") }
+func (m matcherValue) Eval(w io.Writer, ev *Evaluator) error {
+	return fmt.Errorf("couldn't eval matcher")
+}
+func (m matcherValue) serialize() serializableVar {
+	return serializableVar{Type: ""}
+}
+
+func (m matcherValue) dump(d *dumpbuf) {
+	d.err = fmt.Errorf("couldn't dump matcher")
+}
+
+type matchVarref struct{ matcherValue }
+
+func (m matchVarref) String() string { return "$(match-any)" }
 
 type literalRE struct {
+	matcherValue
 	*regexp.Regexp
 }
 
@@ -630,10 +673,7 @@
 	}
 }
 
-func (r literalRE) String() string                  { return r.Regexp.String() }
-func (r literalRE) Eval(w io.Writer, ev *Evaluator) { panic("not implemented") }
-func (r literalRE) serialize() serializableVar      { panic("not implemented") }
-func (r literalRE) dump(w io.Writer)                { panic("not implemented") }
+func (r literalRE) String() string { return r.Regexp.String() }
 
 func matchValue(exp, pat Value) bool {
 	switch pat := pat.(type) {
diff --git a/flags.go b/flags.go
index 105df2d..bf51d72 100644
--- a/flags.go
+++ b/flags.go
@@ -14,6 +14,7 @@
 
 package kati
 
+// Flags to control kati.
 var (
 	LogFlag           bool
 	StatsFlag         bool
diff --git a/func.go b/func.go
index 8cb59d8..b1fe320 100644
--- a/func.go
+++ b/func.go
@@ -90,10 +90,20 @@
 	}
 )
 
-func assertArity(name string, req, n int) {
+type arityError struct {
+	narg int
+	name string
+}
+
+func (e arityError) Error() string {
+	return fmt.Sprintf("*** insufficient number of arguments (%d) to function `%s'.", e.narg, e.name)
+}
+
+func assertArity(name string, req, n int) error {
 	if n-1 < req {
-		panic(fmt.Sprintf("*** insufficient number of arguments (%d) to function `%s'.", n-1, name))
+		return arityError{narg: n - 1, name: name}
 	}
+	return nil
 }
 
 func numericValueForFunc(v string) (int, bool) {
@@ -121,15 +131,15 @@
 
 func (c *fclosure) String() string {
 	if len(c.args) == 0 {
-		panic("no args in func")
+		return "$(func)"
 	}
 	arg0 := c.args[0].String()
 	if arg0 == "" {
-		panic(fmt.Errorf("wrong format of arg0: %q", arg0))
+		return "$(func )"
 	}
 	cp := closeParen(arg0[0])
 	if cp == 0 {
-		panic(fmt.Errorf("wrong format of arg0: %q", arg0))
+		return "${func }"
 	}
 	var args []string
 	for _, arg := range c.args[1:] {
@@ -146,10 +156,10 @@
 	return r
 }
 
-func (c *fclosure) dump(w io.Writer) {
-	dumpByte(w, valueTypeFunc)
+func (c *fclosure) dump(d *dumpbuf) {
+	d.Byte(valueTypeFunc)
 	for _, a := range c.args {
-		a.dump(w)
+		a.dump(d)
 	}
 }
 
@@ -157,10 +167,16 @@
 type funcSubst struct{ fclosure }
 
 func (f *funcSubst) Arity() int { return 3 }
-func (f *funcSubst) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("subst", 3, len(f.args))
+func (f *funcSubst) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("subst", 3, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	from := fargs[0]
 	to := fargs[1]
@@ -169,15 +185,22 @@
 	w.Write(bytes.Replace(text, from, to, -1))
 	freeBuf(abuf)
 	stats.add("funcbody", "subst", t)
+	return nil
 }
 
 type funcPatsubst struct{ fclosure }
 
 func (f *funcPatsubst) Arity() int { return 3 }
-func (f *funcPatsubst) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("patsubst", 3, len(f.args))
+func (f *funcPatsubst) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("patsubst", 3, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	pat := fargs[0]
 	repl := fargs[1]
@@ -197,15 +220,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "patsubst", t)
+	return nil
 }
 
 type funcStrip struct{ fclosure }
 
 func (f *funcStrip) Arity() int { return 1 }
-func (f *funcStrip) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("strip", 1, len(f.args))
+func (f *funcStrip) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("strip", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	space := false
@@ -218,15 +248,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "strip", t)
+	return nil
 }
 
 type funcFindstring struct{ fclosure }
 
 func (f *funcFindstring) Arity() int { return 2 }
-func (f *funcFindstring) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("findstring", 2, len(f.args))
+func (f *funcFindstring) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("findstring", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	find := fargs[0]
 	text := fargs[1]
@@ -235,15 +272,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "findstring", t)
+	return nil
 }
 
 type funcFilter struct{ fclosure }
 
 func (f *funcFilter) Arity() int { return 2 }
-func (f *funcFilter) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("filter", 2, len(f.args))
+func (f *funcFilter) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("filter", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	var patterns [][]byte
 	ws := newWordScanner(fargs[0])
@@ -262,15 +306,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "filter", t)
+	return nil
 }
 
 type funcFilterOut struct{ fclosure }
 
 func (f *funcFilterOut) Arity() int { return 2 }
-func (f *funcFilterOut) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("filter-out", 2, len(f.args))
+func (f *funcFilterOut) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("filter-out", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	var patterns [][]byte
 	ws := newWordScanner(fargs[0])
@@ -291,15 +342,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "filter-out", t)
+	return err
 }
 
 type funcSort struct{ fclosure }
 
 func (f *funcSort) Arity() int { return 1 }
-func (f *funcSort) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("sort", 1, len(f.args))
+func (f *funcSort) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("sort", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	var toks []string
@@ -322,23 +380,30 @@
 		prev = tok
 	}
 	stats.add("funcbody", "sort", t)
+	return nil
 }
 
 type funcWord struct{ fclosure }
 
 func (f *funcWord) Arity() int { return 2 }
-func (f *funcWord) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("word", 2, len(f.args))
+func (f *funcWord) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("word", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	v := string(trimSpaceBytes(fargs[0]))
 	index, ok := numericValueForFunc(v)
 	if !ok {
-		errorExit(ev.filename, ev.lineno, `*** non-numeric first argument to "word" function: %q.`, v)
+		return ev.errorf(`*** non-numeric first argument to "word" function: %q.`, v)
 	}
 	if index == 0 {
-		errorExit(ev.filename, ev.lineno, `*** first argument to "word" function must be greater than 0.`)
+		return ev.errorf(`*** first argument to "word" function must be greater than 0.`)
 	}
 	ws := newWordScanner(fargs[1])
 	for ws.Scan() {
@@ -350,28 +415,35 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "word", t)
+	return err
 }
 
 type funcWordlist struct{ fclosure }
 
 func (f *funcWordlist) Arity() int { return 3 }
-func (f *funcWordlist) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("wordlist", 3, len(f.args))
+func (f *funcWordlist) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("wordlist", 3, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	v := string(trimSpaceBytes(fargs[0]))
 	si, ok := numericValueForFunc(v)
 	if !ok {
-		errorExit(ev.filename, ev.lineno, `*** non-numeric first argument to "wordlist" function: %q.`, v)
+		return ev.errorf(`*** non-numeric first argument to "wordlist" function: %q.`, v)
 	}
 	if si == 0 {
-		errorExit(ev.filename, ev.lineno, `*** invalid first argument to "wordlist" function: %s`, f.args[1])
+		return ev.errorf(`*** invalid first argument to "wordlist" function: %s`, f.args[1])
 	}
 	v = string(trimSpaceBytes(fargs[1]))
 	ei, ok := numericValueForFunc(v)
 	if !ok {
-		errorExit(ev.filename, ev.lineno, `*** non-numeric second argument to "wordlist" function: %q.`, v)
+		return ev.errorf(`*** non-numeric second argument to "wordlist" function: %q.`, v)
 	}
 
 	ws := newWordScanner(fargs[2])
@@ -385,15 +457,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "wordlist", t)
+	return nil
 }
 
 type funcWords struct{ fclosure }
 
 func (f *funcWords) Arity() int { return 1 }
-func (f *funcWords) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("words", 1, len(f.args))
+func (f *funcWords) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("words", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	n := 0
@@ -403,15 +482,22 @@
 	freeBuf(abuf)
 	io.WriteString(w, strconv.Itoa(n))
 	stats.add("funcbody", "words", t)
+	return nil
 }
 
 type funcFirstword struct{ fclosure }
 
 func (f *funcFirstword) Arity() int { return 1 }
-func (f *funcFirstword) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("firstword", 1, len(f.args))
+func (f *funcFirstword) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("firstword", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	if ws.Scan() {
@@ -419,15 +505,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "firstword", t)
+	return nil
 }
 
 type funcLastword struct{ fclosure }
 
 func (f *funcLastword) Arity() int { return 1 }
-func (f *funcLastword) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("lastword", 1, len(f.args))
+func (f *funcLastword) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("lastword", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	var lw []byte
@@ -439,6 +532,7 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "lastword", t)
+	return err
 }
 
 // https://www.gnu.org/software/make/manual/html_node/File-Name-Functions.html#File-Name-Functions
@@ -446,10 +540,16 @@
 type funcJoin struct{ fclosure }
 
 func (f *funcJoin) Arity() int { return 2 }
-func (f *funcJoin) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("join", 2, len(f.args))
+func (f *funcJoin) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("join", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws1 := newWordScanner(fargs[0])
 	ws2 := newWordScanner(fargs[1])
@@ -464,15 +564,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "join", t)
+	return nil
 }
 
 type funcWildcard struct{ fclosure }
 
 func (f *funcWildcard) Arity() int { return 1 }
-func (f *funcWildcard) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("wildcard", 1, len(f.args))
+func (f *funcWildcard) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("wildcard", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	te := traceEvent.begin("wildcard", tmpval(abuf.Bytes()), traceEventMain)
 	if ev.avoidIO && !UseWildcardCache {
 		ev.hasIO = true
@@ -481,27 +588,37 @@
 		io.WriteString(w, " 2> /dev/null)")
 		traceEvent.end(te)
 		freeBuf(abuf)
-		return
+		return nil
 	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	sw := ssvWriter{w: w}
 	for ws.Scan() {
 		pat := string(ws.Bytes())
-		wildcard(&sw, pat)
+		err = wildcard(&sw, pat)
+		if err != nil {
+			return err
+		}
 	}
 	traceEvent.end(te)
 	freeBuf(abuf)
 	stats.add("funcbody", "wildcard", t)
+	return nil
 }
 
 type funcDir struct{ fclosure }
 
 func (f *funcDir) Arity() int { return 1 }
-func (f *funcDir) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("dir", 1, len(f.args))
+func (f *funcDir) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("dir", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	sw := ssvWriter{w: w}
@@ -515,15 +632,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "dir", t)
+	return nil
 }
 
 type funcNotdir struct{ fclosure }
 
 func (f *funcNotdir) Arity() int { return 1 }
-func (f *funcNotdir) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("notdir", 1, len(f.args))
+func (f *funcNotdir) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("notdir", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	sw := ssvWriter{w: w}
@@ -537,15 +661,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "notdir", t)
+	return nil
 }
 
 type funcSuffix struct{ fclosure }
 
 func (f *funcSuffix) Arity() int { return 1 }
-func (f *funcSuffix) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("suffix", 1, len(f.args))
+func (f *funcSuffix) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("suffix", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	sw := ssvWriter{w: w}
@@ -558,15 +689,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "suffix", t)
+	return err
 }
 
 type funcBasename struct{ fclosure }
 
 func (f *funcBasename) Arity() int { return 1 }
-func (f *funcBasename) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("basename", 1, len(f.args))
+func (f *funcBasename) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("basename", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	sw := ssvWriter{w: w}
@@ -577,15 +715,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "basename", t)
+	return nil
 }
 
 type funcAddsuffix struct{ fclosure }
 
 func (f *funcAddsuffix) Arity() int { return 2 }
-func (f *funcAddsuffix) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("addsuffix", 2, len(f.args))
+func (f *funcAddsuffix) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("addsuffix", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	suf := fargs[0]
 	ws := newWordScanner(fargs[1])
@@ -597,15 +742,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "addsuffix", t)
+	return err
 }
 
 type funcAddprefix struct{ fclosure }
 
 func (f *funcAddprefix) Arity() int { return 2 }
-func (f *funcAddprefix) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("addprefix", 2, len(f.args))
+func (f *funcAddprefix) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("addprefix", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	pre := fargs[0]
 	ws := newWordScanner(fargs[1])
@@ -617,20 +769,27 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "addprefix", t)
+	return err
 }
 
 type funcRealpath struct{ fclosure }
 
 func (f *funcRealpath) Arity() int { return 1 }
-func (f *funcRealpath) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("realpath", 1, len(f.args))
+func (f *funcRealpath) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("realpath", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	if ev.avoidIO {
 		io.WriteString(w, "KATI_TODO(realpath)")
 		ev.hasIO = true
-		return
+		return nil
 	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	sw := ssvWriter{w: w}
@@ -650,15 +809,22 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "realpath", t)
+	return err
 }
 
 type funcAbspath struct{ fclosure }
 
 func (f *funcAbspath) Arity() int { return 1 }
-func (f *funcAbspath) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("abspath", 1, len(f.args))
+func (f *funcAbspath) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("abspath", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	t := time.Now()
 	ws := newWordScanner(abuf.Bytes())
 	sw := ssvWriter{w: w}
@@ -673,64 +839,85 @@
 	}
 	freeBuf(abuf)
 	stats.add("funcbody", "abspath", t)
+	return nil
 }
 
 // http://www.gnu.org/software/make/manual/make.html#Conditional-Functions
 type funcIf struct{ fclosure }
 
 func (f *funcIf) Arity() int { return 3 }
-func (f *funcIf) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("if", 2, len(f.args))
+func (f *funcIf) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("if", 2, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	if len(abuf.Bytes()) != 0 {
 		freeBuf(abuf)
-		f.args[2].Eval(w, ev)
-		return
+		return f.args[2].Eval(w, ev)
 	}
 	freeBuf(abuf)
 	if len(f.args) > 3 {
-		f.args[3].Eval(w, ev)
+		return f.args[3].Eval(w, ev)
 	}
+	return nil
 }
 
 type funcAnd struct{ fclosure }
 
 func (f *funcAnd) Arity() int { return 0 }
-func (f *funcAnd) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("and", 0, len(f.args))
+func (f *funcAnd) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("and", 0, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
 	var cond []byte
 	for _, arg := range f.args[1:] {
 		abuf.Reset()
-		arg.Eval(abuf, ev)
+		err = arg.Eval(abuf, ev)
+		if err != nil {
+			return err
+		}
 		cond = abuf.Bytes()
 		if len(cond) == 0 {
 			freeBuf(abuf)
-			return
+			return nil
 		}
 	}
 	w.Write(cond)
 	freeBuf(abuf)
+	return nil
 }
 
 type funcOr struct{ fclosure }
 
 func (f *funcOr) Arity() int { return 0 }
-func (f *funcOr) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("or", 0, len(f.args))
+func (f *funcOr) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("or", 0, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
 	for _, arg := range f.args[1:] {
 		abuf.Reset()
-		arg.Eval(abuf, ev)
+		err = arg.Eval(abuf, ev)
+		if err != nil {
+			return err
+		}
 		cond := abuf.Bytes()
 		if len(cond) != 0 {
 			w.Write(cond)
 			freeBuf(abuf)
-			return
+			return nil
 		}
 	}
 	freeBuf(abuf)
+	return nil
 }
 
 // http://www.gnu.org/software/make/manual/make.html#Shell-Function
@@ -755,10 +942,16 @@
 	return true
 }
 
-func (f *funcShell) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("shell", 1, len(f.args))
+func (f *funcShell) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("shell", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	if ev.avoidIO && !hasNoIoInShellScript(abuf.Bytes()) {
 		te := traceEvent.begin("shell", tmpval(abuf.Bytes()), traceEventMain)
 		ev.hasIO = true
@@ -767,7 +960,7 @@
 		writeByte(w, ')')
 		traceEvent.end(te)
 		freeBuf(abuf)
-		return
+		return nil
 	}
 	arg := abuf.String()
 	freeBuf(abuf)
@@ -790,6 +983,7 @@
 	}
 	w.Write(formatCommandOutput(out))
 	traceEvent.end(te)
+	return nil
 }
 
 func (f *funcShell) Compact() Value {
@@ -825,9 +1019,12 @@
 
 func (f *funcCall) Arity() int { return 0 }
 
-func (f *funcCall) Eval(w io.Writer, ev *Evaluator) {
+func (f *funcCall) Eval(w io.Writer, ev *Evaluator) error {
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1:]...)
+	fargs, err := ev.args(abuf, f.args[1:]...)
+	if err != nil {
+		return err
+	}
 	varname := fargs[0]
 	variable := string(varname)
 	te := traceEvent.begin("call", literal(variable), traceEventMain)
@@ -857,44 +1054,62 @@
 	if LogFlag {
 		w = io.MultiWriter(w, &buf)
 	}
-	v.Eval(w, ev)
+	err = v.Eval(w, ev)
+	if err != nil {
+		return err
+	}
 	ev.paramVars = oldParams
 	traceEvent.end(te)
 	if LogFlag {
 		logf("call %q variable %q return %q", f.args[1], variable, buf.Bytes())
 	}
 	freeBuf(abuf)
+	return nil
 }
 
 // http://www.gnu.org/software/make/manual/make.html#Value-Function
 type funcValue struct{ fclosure }
 
 func (f *funcValue) Arity() int { return 1 }
-func (f *funcValue) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("value", 1, len(f.args))
+func (f *funcValue) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("value", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	v := ev.LookupVar(f.args[1].String())
 	io.WriteString(w, v.String())
+	return nil
 }
 
 // http://www.gnu.org/software/make/manual/make.html#Eval-Function
 type funcEval struct{ fclosure }
 
 func (f *funcEval) Arity() int { return 1 }
-func (f *funcEval) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("eval", 1, len(f.args))
-	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
-	s := abuf.Bytes()
-	logf("eval %q at %s:%d", s, ev.filename, ev.lineno)
-	mk, err := parseMakefileBytes(s, ev.filename, ev.lineno)
+func (f *funcEval) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("eval", 1, len(f.args))
 	if err != nil {
-		panic(err)
+		return err
+	}
+	abuf := newBuf()
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
+	s := abuf.Bytes()
+	logf("eval %q at %s", s, ev.srcpos)
+	mk, err := parseMakefileBytes(s, ev.srcpos)
+	if err != nil {
+		return ev.errorf("%v", err)
 	}
 
 	for _, stmt := range mk.stmts {
-		ev.eval(stmt)
+		err = ev.eval(stmt)
+		if err != nil {
+			return err
+		}
 	}
 	freeBuf(abuf)
+	return nil
 }
 
 func (f *funcEval) Compact() Value {
@@ -963,16 +1178,16 @@
 
 type funcNop struct{ expr string }
 
-func (f *funcNop) String() string             { return f.expr }
-func (f *funcNop) Eval(io.Writer, *Evaluator) {}
+func (f *funcNop) String() string                   { return f.expr }
+func (f *funcNop) Eval(io.Writer, *Evaluator) error { return nil }
 func (f *funcNop) serialize() serializableVar {
 	return serializableVar{
 		Type: "funcNop",
 		V:    f.expr,
 	}
 }
-func (f *funcNop) dump(w io.Writer) {
-	dumpByte(w, valueTypeNop)
+func (f *funcNop) dump(d *dumpbuf) {
+	d.Byte(valueTypeNop)
 }
 
 func parseAssignLiteral(s string) (lhs, op string, rhs Value, ok bool) {
@@ -1007,9 +1222,12 @@
 	return fmt.Sprintf("$(eval %s %s %s)", f.lhs, f.op, f.rhs)
 }
 
-func (f *funcEvalAssign) Eval(w io.Writer, ev *Evaluator) {
+func (f *funcEvalAssign) Eval(w io.Writer, ev *Evaluator) error {
 	var abuf bytes.Buffer
-	f.rhs.Eval(&abuf, ev)
+	err := f.rhs.Eval(&abuf, ev)
+	if err != nil {
+		return err
+	}
 	rhs := trimLeftSpaceBytes(abuf.Bytes())
 	var rvalue Var
 	switch f.op {
@@ -1018,10 +1236,13 @@
 		// literal? e.g. literal("$(foo)") => varref{literal("foo")}.
 		exp, _, err := parseExpr(rhs, nil, false)
 		if err != nil {
-			panic(fmt.Sprintf("eval assign error: %q: %v", f.String(), err))
+			return ev.errorf("eval assign error: %q: %v", f.String(), err)
 		}
 		vbuf := newBuf()
-		exp.Eval(vbuf, ev)
+		err = exp.Eval(vbuf, ev)
+		if err != nil {
+			return err
+		}
 		rvalue = &simpleVar{value: vbuf.String(), origin: "file"}
 		freeBuf(vbuf)
 	case "=":
@@ -1029,14 +1250,17 @@
 	case "+=":
 		prev := ev.LookupVar(f.lhs)
 		if prev.IsDefined() {
-			rvalue = prev.Append(ev, string(rhs))
+			rvalue, err = prev.Append(ev, string(rhs))
+			if err != nil {
+				return err
+			}
 		} else {
 			rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"}
 		}
 	case "?=":
 		prev := ev.LookupVar(f.lhs)
 		if prev.IsDefined() {
-			return
+			return nil
 		}
 		rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"}
 	}
@@ -1044,6 +1268,7 @@
 		logf("Eval ASSIGN: %s=%q (flavor:%q)", f.lhs, rvalue, rvalue.Flavor())
 	}
 	ev.outVars.Assign(f.lhs, rvalue)
+	return nil
 }
 
 func (f *funcEvalAssign) serialize() serializableVar {
@@ -1057,80 +1282,107 @@
 	}
 }
 
-func (f *funcEvalAssign) dump(w io.Writer) {
-	dumpByte(w, valueTypeAssign)
-	dumpString(w, f.lhs)
-	dumpString(w, f.op)
-	f.rhs.dump(w)
+func (f *funcEvalAssign) dump(d *dumpbuf) {
+	d.Byte(valueTypeAssign)
+	d.Str(f.lhs)
+	d.Str(f.op)
+	f.rhs.dump(d)
 }
 
 // http://www.gnu.org/software/make/manual/make.html#Origin-Function
 type funcOrigin struct{ fclosure }
 
 func (f *funcOrigin) Arity() int { return 1 }
-func (f *funcOrigin) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("origin", 1, len(f.args))
+func (f *funcOrigin) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("origin", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	v := ev.LookupVar(f.args[1].String())
 	io.WriteString(w, v.Origin())
+	return nil
 }
 
 // https://www.gnu.org/software/make/manual/html_node/Flavor-Function.html#Flavor-Function
 type funcFlavor struct{ fclosure }
 
 func (f *funcFlavor) Arity() int { return 1 }
-func (f *funcFlavor) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("flavor", 1, len(f.args))
+func (f *funcFlavor) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("flavor", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	v := ev.LookupVar(f.args[1].String())
 	io.WriteString(w, v.Flavor())
+	return nil
 }
 
 // http://www.gnu.org/software/make/manual/make.html#Make-Control-Functions
 type funcInfo struct{ fclosure }
 
 func (f *funcInfo) Arity() int { return 1 }
-func (f *funcInfo) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("info", 1, len(f.args))
+func (f *funcInfo) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("info", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	if ev.avoidIO {
 		io.WriteString(w, "KATI_TODO(info)")
 		ev.hasIO = true
-		return
+		return nil
 	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
 	fmt.Printf("%s\n", abuf.String())
 	freeBuf(abuf)
+	return nil
 }
 
 type funcWarning struct{ fclosure }
 
 func (f *funcWarning) Arity() int { return 1 }
-func (f *funcWarning) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("warning", 1, len(f.args))
+func (f *funcWarning) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("warning", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	if ev.avoidIO {
 		io.WriteString(w, "KATI_TODO(warning)")
 		ev.hasIO = true
-		return
+		return nil
 	}
 	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
-	fmt.Printf("%s:%d: %s\n", ev.filename, ev.lineno, abuf.String())
+	err = f.args[1].Eval(abuf, ev)
+	if err != nil {
+		return err
+	}
+	fmt.Printf("%s: %s\n", ev.srcpos, abuf.String())
 	freeBuf(abuf)
+	return nil
 }
 
 type funcError struct{ fclosure }
 
 func (f *funcError) Arity() int { return 1 }
-func (f *funcError) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("error", 1, len(f.args))
+func (f *funcError) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("error", 1, len(f.args))
+	if err != nil {
+		return err
+	}
 	if ev.avoidIO {
 		io.WriteString(w, "KATI_TODO(error)")
 		ev.hasIO = true
-		return
+		return nil
 	}
-	abuf := newBuf()
-	f.args[1].Eval(abuf, ev)
-	errorExit(ev.filename, ev.lineno, "*** %s.", abuf.String())
-	freeBuf(abuf)
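+	// evaluate the message, then report it via the error path instead of exiting the process.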
+	var abuf buffer
+	err = f.args[1].Eval(&abuf, ev)
+	if err != nil {
+		return err
+	}
+	return ev.errorf("*** %s.", abuf.String())
 }
 
 // http://www.gnu.org/software/make/manual/make.html#Foreach-Function
@@ -1138,10 +1390,16 @@
 
 func (f *funcForeach) Arity() int { return 3 }
 
-func (f *funcForeach) Eval(w io.Writer, ev *Evaluator) {
-	assertArity("foreach", 3, len(f.args))
+func (f *funcForeach) Eval(w io.Writer, ev *Evaluator) error {
+	err := assertArity("foreach", 3, len(f.args))
+	if err != nil {
+		return err
+	}
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.args[1], f.args[2])
+	fargs, err := ev.args(abuf, f.args[1], f.args[2])
+	if err != nil {
+		return err
+	}
 	varname := string(fargs[0])
 	ws := newWordScanner(fargs[1])
 	text := f.args[3]
@@ -1154,8 +1412,12 @@
 		if space {
 			writeByte(w, ' ')
 		}
-		text.Eval(w, ev)
+		err = text.Eval(w, ev)
+		if err != nil {
+			return err
+		}
 		space = true
 	}
 	freeBuf(abuf)
+	return nil
 }
diff --git a/log.go b/log.go
index 811792d..29609e7 100644
--- a/log.go
+++ b/log.go
@@ -17,7 +17,6 @@
 import (
 	"bytes"
 	"fmt"
-	"os"
 	"sync"
 )
 
@@ -47,32 +46,12 @@
 	logAlways(f, a...)
 }
 
-func warn(filename string, lineno int, f string, a ...interface{}) {
-	f = fmt.Sprintf("%s:%d: warning: %s\n", filename, lineno, f)
+func warn(loc srcpos, f string, a ...interface{}) {
+	f = fmt.Sprintf("%s: warning: %s\n", loc, f)
 	fmt.Printf(f, a...)
 }
 
-func warnNoPrefix(filename string, lineno int, f string, a ...interface{}) {
-	f = fmt.Sprintf("%s:%d: %s\n", filename, lineno, f)
+func warnNoPrefix(loc srcpos, f string, a ...interface{}) {
+	f = fmt.Sprintf("%s: %s\n", loc, f)
 	fmt.Printf(f, a...)
 }
-
-var atErrors []func()
-
-func AtError(f func()) {
-	atErrors = append(atErrors, f)
-}
-
-func errorExit(filename string, lineno int, f string, a ...interface{}) {
-	f = fmt.Sprintf("%s:%d: %s", filename, lineno, f)
-	errorNoLocationExit(f, a...)
-}
-
-func errorNoLocationExit(f string, a ...interface{}) {
-	f = fmt.Sprintf("%s\n", f)
-	fmt.Printf(f, a...)
-	for i := len(atErrors) - 1; i >= 0; i-- {
-		atErrors[i]()
-	}
-	os.Exit(2)
-}
diff --git a/ninja.go b/ninja.go
index 5f6c648..9acf664 100644
--- a/ninja.go
+++ b/ninja.go
@@ -33,21 +33,17 @@
 	ex      *Executor
 	ruleID  int
 	done    map[string]bool
-	ccRe    *regexp.Regexp
 	gomaDir string
 }
 
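+// ccRE matches compile commands that use prebuilt gcc/clang so they can be dispatched through gomacc.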
+var ccRE = regexp.MustCompile(`^prebuilts/(gcc|clang)/.*(gcc|g\+\+|clang|clang\+\+) .* -c `)
+
 func newNinjaGenerator(g *DepGraph, gomaDir string) *ninjaGenerator {
-	ccRe, err := regexp.Compile(`^prebuilts/(gcc|clang)/.*(gcc|g\+\+|clang|clang\+\+) .* -c `)
-	if err != nil {
-		panic(err)
-	}
 	return &ninjaGenerator{
 		nodes:   g.nodes,
 		vars:    g.vars,
 		exports: g.exports,
 		done:    make(map[string]bool),
-		ccRe:    ccRe,
 		gomaDir: gomaDir,
 	}
 }
@@ -104,7 +100,7 @@
 		rest := ss[i+len(mvCmd):]
 		ei := strings.IndexByte(rest, ')')
 		if ei < 0 {
-			panic(ss)
+			return "", fmt.Errorf("unbalanced parenthesis? %s", ss)
 		}
 		return rest[:ei], nil
 	}
@@ -175,7 +171,7 @@
 		if cmd == "" {
 			cmd = "true"
 		}
-		if n.gomaDir != "" && n.ccRe.MatchString(cmd) {
+		if n.gomaDir != "" && ccRE.MatchString(cmd) {
 			cmd = fmt.Sprintf("%s/gomacc %s", n.gomaDir, cmd)
 			useGomacc = true
 		}
@@ -229,17 +225,20 @@
 	return dep
 }
 
-func (n *ninjaGenerator) emitNode(node *DepNode) {
+func (n *ninjaGenerator) emitNode(node *DepNode) error {
 	if n.done[node.Output] {
-		return
+		return nil
 	}
 	n.done[node.Output] = true
 
 	if len(node.Cmds) == 0 && len(node.Deps) == 0 && !node.IsPhony {
-		return
+		return nil
 	}
 
-	runners, _ := n.ex.createRunners(node, true)
+	runners, _, err := n.ex.createRunners(node, true)
+	if err != nil {
+		return err
+	}
 	ruleName := "phony"
 	useLocalPool := false
 	if len(runners) > 0 {
@@ -253,7 +252,7 @@
 		}
 		depfile, err := getDepfile(ss)
 		if err != nil {
-			panic(err)
+			return err
 		}
 		if depfile != "" {
 			fmt.Fprintf(n.f, " depfile = %s\n", depfile)
@@ -275,26 +274,39 @@
 	}
 
 	for _, d := range node.Deps {
-		n.emitNode(d)
+		err := n.emitNode(d)
+		if err != nil {
+			return err
+		}
 	}
+	return nil
 }
 
-func (n *ninjaGenerator) generateShell() {
+func (n *ninjaGenerator) generateShell() (err error) {
 	f, err := os.Create("ninja.sh")
 	if err != nil {
-		panic(err)
+		return err
 	}
-	defer f.Close()
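+	// propagate a Close failure only when no earlier error is being returned.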
+	defer func() {
+		cerr := f.Close()
+		if err == nil {
+			err = cerr
+		}
+	}()
 
 	ev := NewEvaluator(n.vars)
-	shell := ev.EvaluateVar("SHELL")
-	if shell == "" {
+	shell, err := ev.EvaluateVar("SHELL")
+	if err != nil || shell == "" {
 		shell = "/bin/sh"
 	}
 	fmt.Fprintf(f, "#!%s\n", shell)
 	for name, export := range n.exports {
 		if export {
-			fmt.Fprintf(f, "export %s=%s\n", name, ev.EvaluateVar(name))
+			v, err := ev.EvaluateVar(name)
+			if err != nil {
+				return err
+			}
+			fmt.Fprintf(f, "export %s=%s\n", name, v)
 		} else {
 			fmt.Fprintf(f, "unset %s\n", name)
 		}
@@ -305,18 +317,20 @@
 		fmt.Fprintln(f, `exec ninja -j300 "$@"`)
 	}
 
-	err = f.Chmod(0755)
-	if err != nil {
-		panic(err)
-	}
+	return f.Chmod(0755)
 }
 
-func (n *ninjaGenerator) generateNinja() {
+func (n *ninjaGenerator) generateNinja() (err error) {
 	f, err := os.Create("build.ninja")
 	if err != nil {
-		panic(err)
+		return err
 	}
-	defer f.Close()
+	defer func() {
+		cerr := f.Close()
+		if err == nil {
+			err = cerr
+		}
+	}()
 
 	n.f = f
 	fmt.Fprintf(n.f, "# Generated by kati\n")
@@ -327,16 +341,31 @@
 		fmt.Fprintf(n.f, " depth = %d\n", runtime.NumCPU())
 	}
 
-	n.ex = NewExecutor(n.vars, nil)
-	for _, node := range n.nodes {
-		n.emitNode(node)
+	n.ex, err = NewExecutor(n.vars, nil)
+	if err != nil {
+		return err
 	}
+	for _, node := range n.nodes {
+		err := n.emitNode(node)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
 }
 
-func GenerateNinja(g *DepGraph, gomaDir string) {
+// GenerateNinja generates build.ninja from DepGraph.
+func GenerateNinja(g *DepGraph, gomaDir string) error {
 	startTime := time.Now()
 	n := newNinjaGenerator(g, gomaDir)
-	n.generateShell()
-	n.generateNinja()
+	err := n.generateShell()
+	if err != nil {
+		return err
+	}
+	err = n.generateNinja()
+	if err != nil {
+		return err
+	}
 	logStats("generate ninja time: %q", time.Since(startTime))
+	return nil
 }
diff --git a/para.go b/para.go
index ab90825..6993dfb 100644
--- a/para.go
+++ b/para.go
@@ -29,38 +29,46 @@
 	return 0
 }
 
-func sendMsg(w io.Writer, data []byte) {
-	for len(data) != 0 {
-		written, err := w.Write(data)
-		if err == io.EOF {
-			return
-		}
-		if err != nil {
-			panic(err)
-		}
-		data = data[written:]
-	}
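+// paraConn is the pipe connection to the para subprocess.
+// The first I/O error is latched in err and short-circuits later sends and receives.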
+type paraConn struct {
+	w   io.WriteCloser
+	r   *bufio.Reader
+	err error
 }
 
-func sendInt(w io.Writer, i int) {
+func (c *paraConn) sendMsg(data []byte) error {
+	if c.err != nil {
+		return c.err
+	}
+	_, err := c.w.Write(data)
+	c.err = err
+	return err
+}
+
+func (c *paraConn) sendInt(i int) error {
+	if c.err != nil {
+		return c.err
+	}
 	v := int32(i)
-	binary.Write(w, binary.LittleEndian, &v)
+	c.err = binary.Write(c.w, binary.LittleEndian, &v)
+	return c.err
 }
 
-func sendString(w io.Writer, s string) {
-	sendInt(w, len(s))
-	sendMsg(w, []byte(s))
+func (c *paraConn) sendString(s string) error {
+	c.sendInt(len(s))
+	c.sendMsg([]byte(s))
+	return c.err
 }
 
-func sendRunners(w io.Writer, runners []runner) {
-	sendInt(w, len(runners))
+func (c *paraConn) sendRunners(runners []runner) error {
+	c.sendInt(len(runners))
 	for _, r := range runners {
-		sendString(w, r.output)
-		sendString(w, r.cmd)
-		sendString(w, r.shell)
-		sendInt(w, btoi(r.echo))
-		sendInt(w, btoi(r.ignoreError))
+		c.sendString(r.output)
+		c.sendString(r.cmd)
+		c.sendString(r.shell)
+		c.sendInt(btoi(r.echo))
+		c.sendInt(btoi(r.ignoreError))
 	}
+	return c.err
 }
 
 type paraResult struct {
@@ -71,49 +79,37 @@
 	signal int
 }
 
-func recvInt(r *bufio.Reader) (int, error) {
+func (c *paraConn) recvInt() (int, error) {
+	if c.err != nil {
+		return 0, c.err
+	}
 	var v int32
-	err := binary.Read(r, binary.LittleEndian, &v)
-	return int(v), err
+	c.err = binary.Read(c.r, binary.LittleEndian, &v)
+	return int(v), c.err
 }
 
-func recvString(r *bufio.Reader) (string, error) {
-	l, err := recvInt(r)
+func (c *paraConn) recvString() (string, error) {
+	l, err := c.recvInt()
 	if err != nil {
+		c.err = err
 		return "", err
 	}
 	buf := make([]byte, l)
-	read := 0
-	for read < len(buf) {
-		r, err := r.Read(buf[read:])
-		if err != nil {
-			return "", err
-		}
-		read += r
+	_, c.err = io.ReadFull(c.r, buf)
+	if c.err != nil {
+		return "", c.err
 	}
 	return string(buf), nil
 }
 
-func recvResult(r *bufio.Reader) (*paraResult, error) {
-	output, err := recvString(r)
-	if err != nil {
-		return nil, err
-	}
-	stdout, err := recvString(r)
-	if err != nil {
-		return nil, err
-	}
-	stderr, err := recvString(r)
-	if err != nil {
-		return nil, err
-	}
-	status, err := recvInt(r)
-	if err != nil {
-		return nil, err
-	}
-	signal, err := recvInt(r)
-	if err != nil {
-		return nil, err
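+// recvResult reads one result record; per-field errors are latched in c.err and checked once at the end.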
+func (c *paraConn) recvResult() (*paraResult, error) {
+	output, _ := c.recvString()
+	stdout, _ := c.recvString()
+	stderr, _ := c.recvString()
+	status, _ := c.recvInt()
+	signal, _ := c.recvInt()
+	if c.err != nil {
+		return nil, c.err
 	}
 	return &paraResult{
 		output: output,
@@ -127,55 +123,58 @@
 type paraWorker struct {
 	para     *exec.Cmd
 	paraChan chan *paraResult
-	stdin    io.WriteCloser
-	stdout   *bufio.Reader
+	c        *paraConn
 	doneChan chan bool
 }
 
-func newParaWorker(paraChan chan *paraResult, numJobs int, paraPath string) *paraWorker {
+func newParaWorker(paraChan chan *paraResult, numJobs int, paraPath string) (*paraWorker, error) {
 	para := exec.Command(paraPath, fmt.Sprintf("-j%d", numJobs), "--kati")
 	stdin, err := para.StdinPipe()
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	stdout, err := para.StdoutPipe()
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	err = para.Start()
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	return &paraWorker{
 		para:     para,
 		paraChan: paraChan,
-		stdin:    stdin,
-		stdout:   bufio.NewReader(stdout),
+		c: &paraConn{
+			w: stdin,
+			r: bufio.NewReader(stdout),
+		},
 		doneChan: make(chan bool),
-	}
+	}, nil
 }
 
-func (para *paraWorker) Run() {
+func (para *paraWorker) Run() error {
 	for {
-		r, err := recvResult(para.stdout)
-		if err == io.EOF {
-			break
-		}
+		r, err := para.c.recvResult()
 		if err != nil {
-			panic(err)
+			break
 		}
 		para.paraChan <- r
 	}
 	para.para.Process.Kill()
 	para.para.Process.Wait()
 	para.doneChan <- true
+	return para.c.err
 }
 
-func (para *paraWorker) Wait() {
-	para.stdin.Close()
+func (para *paraWorker) Wait() error {
+	para.c.w.Close()
 	<-para.doneChan
+	if para.c.err == io.EOF {
+		return nil
+	}
+	return para.c.err
 }
 
-func (para *paraWorker) RunCommand(runners []runner) {
-	sendRunners(para.stdin, runners)
+func (para *paraWorker) RunCommand(runners []runner) error {
+	return para.c.sendRunners(runners)
 }
diff --git a/para_test.go b/para_test.go
index c0a78e2..73e6350 100644
--- a/para_test.go
+++ b/para_test.go
@@ -23,13 +23,16 @@
 func TestPara(t *testing.T) {
 	cwd, err := filepath.Abs(".")
 	if err != nil {
-		panic(err)
+		t.Fatal(err)
 	}
 	paraPath := filepath.Join(cwd, "para")
 	numJobs := 4
 
 	paraChan := make(chan *paraResult)
-	para := newParaWorker(paraChan, numJobs, paraPath)
+	para, err := newParaWorker(paraChan, numJobs, paraPath)
+	if err != nil {
+		t.Fatal(err)
+	}
 	go para.Run()
 
 	numTasks := 100
@@ -61,5 +64,8 @@
 		}
 	}
 
-	para.Wait()
+	err = para.Wait()
+	if err != nil {
+		t.Errorf("para.Wait()=%v; want=<nil>", err)
+	}
 }
diff --git a/parser.go b/parser.go
index 06e5f74..c035791 100644
--- a/parser.go
+++ b/parser.go
@@ -23,6 +23,7 @@
 	"bufio"
 	"bytes"
 	"crypto/sha1"
+	"errors"
 	"fmt"
 	"io"
 	"io/ioutil"
@@ -48,14 +49,13 @@
 	lineno      int
 	elineno     int // lineno == elineno unless there is trailing '\'.
 	linenoFixed bool
-	unBuf       []byte
-	hasUnBuf    bool
 	done        bool
 	outStmts    *[]ast
 	ifStack     []ifState
 	inDef       []string
 	defOpt      string
 	numIfNest   int
+	err         error
 }
 
 func newParser(rd io.Reader, filename string) *parser {
@@ -67,16 +67,18 @@
 	return p
 }
 
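+// srcpos returns the parser's current source position.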
+func (p *parser) srcpos() srcpos {
+	return srcpos{
+		filename: p.mk.filename,
+		lineno:   p.lineno,
+	}
+}
+
 func (p *parser) addStatement(stmt ast) {
 	*p.outStmts = append(*p.outStmts, stmt)
 }
 
 func (p *parser) readLine() []byte {
-	if p.hasUnBuf {
-		p.hasUnBuf = false
-		return p.unBuf
-	}
-
 	if !p.linenoFixed {
 		p.lineno = p.elineno
 	}
@@ -88,7 +90,8 @@
 	if err == io.EOF {
 		p.done = true
 	} else if err != nil {
-		panic(fmt.Errorf("readline %s:%d: %v", p.mk.filename, p.lineno, err))
+		p.err = fmt.Errorf("readline %s: %v", p.srcpos(), err)
+		p.done = true
 	}
 
 	line = bytes.TrimRight(line, "\r\n")
@@ -163,22 +166,14 @@
 	return line
 }
 
-func (p *parser) unreadLine(line []byte) {
-	if p.hasUnBuf {
-		panic("unreadLine twice!")
-	}
-	p.unBuf = line
-	p.hasUnBuf = true
-}
-
-func newAssignAST(p *parser, lhsBytes []byte, rhsBytes []byte, op string) *assignAST {
+func newAssignAST(p *parser, lhsBytes []byte, rhsBytes []byte, op string) (*assignAST, error) {
 	lhs, _, err := parseExpr(lhsBytes, nil, true)
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	rhs, _, err := parseExpr(rhsBytes, nil, true)
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	opt := ""
 	if p != nil {
@@ -189,20 +184,22 @@
 		rhs: rhs,
 		op:  op,
 		opt: opt,
-	}
+	}, nil
 }
 
-func (p *parser) parseAssign(line []byte, sep, esep int) ast {
+func (p *parser) parseAssign(line []byte, sep, esep int) (ast, error) {
 	logf("parseAssign %q op:%q", line, line[sep:esep])
-	aast := newAssignAST(p, bytes.TrimSpace(line[:sep]), trimLeftSpaceBytes(line[esep:]), string(line[sep:esep]))
-	aast.filename = p.mk.filename
-	aast.lineno = p.lineno
-	return aast
+	aast, err := newAssignAST(p, bytes.TrimSpace(line[:sep]), trimLeftSpaceBytes(line[esep:]), string(line[sep:esep]))
+	if err != nil {
+		return nil, err
+	}
+	aast.srcpos = p.srcpos()
+	return aast, nil
 }
 
-func (p *parser) parseMaybeRule(line []byte, equalIndex, semicolonIndex int) ast {
+func (p *parser) parseMaybeRule(line []byte, equalIndex, semicolonIndex int) (ast, error) {
 	if len(trimSpaceBytes(line)) == 0 {
-		return nil
+		return nil, nil
 	}
 
 	expr := line
@@ -229,7 +226,7 @@
 
 	v, _, err := parseExpr(expr, nil, true)
 	if err != nil {
-		panic(fmt.Errorf("parse %s:%d %v", p.mk.filename, p.lineno, err))
+		return nil, p.srcpos().error(err)
 	}
 
 	rast := &maybeRuleAST{
@@ -237,106 +234,110 @@
 		term:      term,
 		afterTerm: afterTerm,
 	}
-	rast.filename = p.mk.filename
-	rast.lineno = p.lineno
-	return rast
+	rast.srcpos = p.srcpos()
+	return rast, nil
 }
 
-func (p *parser) parseInclude(line string, oplen int) ast {
+func (p *parser) parseInclude(line string, oplen int) {
 	// TODO(ukai): parse expr here
 	iast := &includeAST{
 		expr: line[oplen+1:],
 		op:   line[:oplen],
 	}
-	iast.filename = p.mk.filename
-	iast.lineno = p.lineno
-	return iast
+	iast.srcpos = p.srcpos()
+	p.addStatement(iast)
 }
 
-func (p *parser) parseIfdef(line []byte, oplen int) ast {
+func (p *parser) parseIfdef(line []byte, oplen int) {
 	lhs, _, err := parseExpr(trimLeftSpaceBytes(line[oplen+1:]), nil, true)
 	if err != nil {
-		panic(fmt.Errorf("ifdef parse %s:%d %v", p.mk.filename, p.lineno, err))
+		p.err = p.srcpos().error(err)
+		return
 	}
 	iast := &ifAST{
 		op:  string(line[:oplen]),
 		lhs: lhs,
 	}
-	iast.filename = p.mk.filename
-	iast.lineno = p.lineno
+	iast.srcpos = p.srcpos()
 	p.addStatement(iast)
 	p.ifStack = append(p.ifStack, ifState{ast: iast, numNest: p.numIfNest})
 	p.outStmts = &iast.trueStmts
-	return iast
 }
 
-func (p *parser) parseTwoQuotes(s string, op string) ([]string, bool) {
+func (p *parser) parseTwoQuotes(s string, op string) ([]string, bool, error) {
 	var args []string
 	for i := 0; i < 2; i++ {
 		s = strings.TrimSpace(s)
 		if s == "" {
-			return nil, false
+			return nil, false, nil
 		}
 		quote := s[0]
 		if quote != '\'' && quote != '"' {
-			return nil, false
+			return nil, false, nil
 		}
 		end := strings.IndexByte(s[1:], quote) + 1
 		if end < 0 {
-			return nil, false
+			return nil, false, nil
 		}
 		args = append(args, s[1:end])
 		s = s[end+1:]
 	}
 	if len(s) > 0 {
-		errorExit(p.mk.filename, p.lineno, `extraneous text after %q directive`, op)
+		return nil, false, p.srcpos().errorf(`extraneous text after %q directive`, op)
 	}
-	return args, true
+	return args, true, nil
 }
 
 // parse
 //  "(lhs, rhs)"
 //  "lhs, rhs"
-func (p *parser) parseEq(s string, op string) (string, string, bool) {
+func (p *parser) parseEq(s string, op string) (string, string, bool, error) {
 	if s[0] == '(' && s[len(s)-1] == ')' {
 		s = s[1 : len(s)-1]
 		term := []byte{','}
 		in := []byte(s)
 		v, n, err := parseExpr(in, term, false)
 		if err != nil {
-			return "", "", false
+			return "", "", false, err
 		}
 		lhs := v.String()
 		n++
 		n += skipSpaces(in[n:], nil)
 		v, n, err = parseExpr(in[n:], nil, false)
 		if err != nil {
-			return "", "", false
+			return "", "", false, err
 		}
 		rhs := v.String()
-		return lhs, rhs, true
+		return lhs, rhs, true, nil
 	}
-	args, ok := p.parseTwoQuotes(s, op)
+	args, ok, err := p.parseTwoQuotes(s, op)
 	if !ok {
-		return "", "", false
+		return "", "", false, err
 	}
-	return args[0], args[1], true
+	return args[0], args[1], true, nil
 }
 
-func (p *parser) parseIfeq(line string, oplen int) ast {
+func (p *parser) parseIfeq(line string, oplen int) {
 	op := line[:oplen]
-	lhsBytes, rhsBytes, ok := p.parseEq(strings.TrimSpace(line[oplen+1:]), op)
+	lhsBytes, rhsBytes, ok, err := p.parseEq(strings.TrimSpace(line[oplen+1:]), op)
+	if err != nil {
+		p.err = err
+		return
+	}
 	if !ok {
-		errorExit(p.mk.filename, p.lineno, `*** invalid syntax in conditional.`)
+		p.err = p.srcpos().errorf(`*** invalid syntax in conditional.`)
+		return
 	}
 
 	lhs, _, err := parseExpr([]byte(lhsBytes), nil, true)
 	if err != nil {
-		panic(fmt.Errorf("parse ifeq lhs %s:%d %v", p.mk.filename, p.lineno, err))
+		p.err = p.srcpos().error(err)
+		return
 	}
 	rhs, _, err := parseExpr([]byte(rhsBytes), nil, true)
 	if err != nil {
-		panic(fmt.Errorf("parse ifeq rhs %s:%d %v", p.mk.filename, p.lineno, err))
+		p.err = p.srcpos().error(err)
+		return
 	}
 
 	iast := &ifAST{
@@ -344,25 +345,30 @@
 		lhs: lhs,
 		rhs: rhs,
 	}
-	iast.filename = p.mk.filename
-	iast.lineno = p.lineno
+	iast.srcpos = p.srcpos()
 	p.addStatement(iast)
 	p.ifStack = append(p.ifStack, ifState{ast: iast, numNest: p.numIfNest})
 	p.outStmts = &iast.trueStmts
-	return iast
 }
 
-func (p *parser) checkIfStack(curKeyword string) {
+func (p *parser) checkIfStack(curKeyword string) error {
 	if len(p.ifStack) == 0 {
-		errorExit(p.mk.filename, p.lineno, `*** extraneous %q.`, curKeyword)
+		return p.srcpos().errorf(`*** extraneous %q.`, curKeyword)
 	}
+	return nil
 }
 
 func (p *parser) parseElse(line []byte) {
-	p.checkIfStack("else")
+	err := p.checkIfStack("else")
+	if err != nil {
+		p.err = err
+		return
+	}
 	state := &p.ifStack[len(p.ifStack)-1]
 	if state.inElse {
-		errorExit(p.mk.filename, p.lineno, `*** only one "else" per conditional.`)
+		p.err = p.srcpos().errorf(`*** only one "else" per conditional.`)
+		return
 	}
 	state.inElse = true
 	p.outStmts = &state.ast.falseStmts
@@ -384,11 +390,16 @@
 		return
 	}
 	p.numIfNest = 0
-	warnNoPrefix(p.mk.filename, p.lineno, "extraneous text after `else` directive")
+	warnNoPrefix(p.srcpos(), "extraneous text after `else` directive")
 }
 
 func (p *parser) parseEndif(line string) {
-	p.checkIfStack("endif")
+	err := p.checkIfStack("endif")
+	if err != nil {
+		p.err = err
+		return
+	}
 	state := p.ifStack[len(p.ifStack)-1]
 	for t := 0; t <= state.numNest; t++ {
 		p.ifStack = p.ifStack[0 : len(p.ifStack)-1]
@@ -403,6 +414,7 @@
 			}
 		}
 	}
 }
 
 type directiveFunc func(*parser, []byte) []byte
@@ -447,12 +459,12 @@
 }
 
 func includeDirective(p *parser, line []byte) []byte {
-	p.addStatement(p.parseInclude(string(line), len("include")))
+	p.parseInclude(string(line), len("include"))
 	return nil
 }
 
 func sincludeDirective(p *parser, line []byte) []byte {
-	p.addStatement(p.parseInclude(string(line), len("-include")))
+	p.parseInclude(string(line), len("-include"))
 	return nil
 }
 
@@ -522,8 +534,7 @@
 		expr:   line,
 		export: export,
 	}
-	east.filename = p.mk.filename
-	east.lineno = p.lineno
+	east.srcpos = p.srcpos()
 	p.addStatement(east)
 	return hasEqual
 }
@@ -561,7 +572,7 @@
 	if found >= 0 && s[:found] == "endef" {
 		rest := strings.TrimSpace(s[found+1:])
 		if rest != "" && rest[0] != '#' {
-			warnNoPrefix(p.mk.filename, p.lineno, "extraneous text after \"endef\" directive")
+			warnNoPrefix(p.srcpos(), "extraneous text after \"endef\" directive")
 		}
 		return true
 	}
@@ -569,11 +580,6 @@
 }
 
 func (p *parser) parse() (mk makefile, err error) {
-	defer func() {
-		if r := recover(); r != nil {
-			err = fmt.Errorf("panic in parse %s: %v", mk.filename, r)
-		}
-	}()
 	for !p.done {
 		line := p.readLine()
 
@@ -581,9 +587,12 @@
 			lineStr := string(p.processDefineLine(line))
 			if p.isEndef(lineStr) {
 				logf("multilineAssign %q", p.inDef)
-				aast := newAssignAST(p, []byte(p.inDef[0]), []byte(strings.Join(p.inDef[1:], "\n")), "=")
-				aast.filename = p.mk.filename
-				aast.lineno = p.lineno - len(p.inDef)
+				aast, err := newAssignAST(p, []byte(p.inDef[0]), []byte(strings.Join(p.inDef[1:], "\n")), "=")
+				if err != nil {
+					return makefile{}, err
+				}
+				aast.srcpos = p.srcpos()
+				aast.srcpos.lineno -= len(p.inDef)
 				p.addStatement(aast)
 				p.inDef = nil
 				p.defOpt = ""
@@ -601,14 +610,16 @@
 		if f, ok := p.isDirective(line, makeDirectives); ok {
 			line = trimSpaceBytes(p.processMakefileLine(line))
 			line = f(p, line)
+			if p.err != nil {
+				return makefile{}, p.err
+			}
 			if len(line) == 0 {
 				continue
 			}
 		}
 		if line[0] == '\t' {
 			cast := &commandAST{cmd: string(p.processRecipeLine(line[1:]))}
-			cast.filename = p.mk.filename
-			cast.lineno = p.lineno
+			cast.srcpos = p.srcpos()
 			p.addStatement(cast)
 			continue
 		}
@@ -626,7 +637,7 @@
 				parenStack = append(parenStack, ch)
 			case ')', '}':
 				if len(parenStack) == 0 {
-					warn(p.mk.filename, p.lineno, "Unmatched parens: %s", line)
+					warn(p.srcpos(), "Unmatched parens: %s", line)
 				} else {
 					cp := closeParen(parenStack[len(parenStack)-1])
 					if cp == ch {
@@ -642,7 +653,10 @@
 			case ':':
 				if i+1 < len(line) && line[i+1] == '=' {
 					if !isRule {
-						stmt = p.parseAssign(line, i, i+2)
+						stmt, err = p.parseAssign(line, i, i+2)
+						if err != nil {
+							return makefile{}, err
+						}
 					}
 				} else {
 					isRule = true
@@ -653,14 +667,20 @@
 				}
 			case '=':
 				if !isRule {
-					stmt = p.parseAssign(line, i, i+1)
+					stmt, err = p.parseAssign(line, i, i+1)
+					if err != nil {
+						return makefile{}, err
+					}
 				}
 				if equalIndex < 0 {
 					equalIndex = i
 				}
 			case '?', '+':
 				if !isRule && i+1 < len(line) && line[i+1] == '=' {
-					stmt = p.parseAssign(line, i, i+2)
+					stmt, err = p.parseAssign(line, i, i+2)
+					if err != nil {
+						return makefile{}, err
+					}
 				}
 			}
 			if stmt != nil {
@@ -669,40 +689,42 @@
 			}
 		}
 		if stmt == nil {
-			stmt = p.parseMaybeRule(line, equalIndex, semicolonIndex)
+			stmt, err = p.parseMaybeRule(line, equalIndex, semicolonIndex)
+			if err != nil {
+				return makefile{}, err
+			}
 			if stmt != nil {
 				p.addStatement(stmt)
 			}
 		}
 	}
-	return p.mk, nil
+	return p.mk, p.err
 }
 
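+// defaultMakefile returns the first of GNUmakefile, makefile and Makefile that exists.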
-func defaultMakefile() string {
+func defaultMakefile() (string, error) {
 	candidates := []string{"GNUmakefile", "makefile", "Makefile"}
 	for _, filename := range candidates {
 		if exists(filename) {
-			return filename
+			return filename, nil
 		}
 	}
-	errorNoLocationExit("no targets specified and no makefile found.")
-	panic("") // Cannot be reached.
+	return "", errors.New("no targets specified and no makefile found")
 }
 
-func parseMakefileReader(rd io.Reader, name string, lineno int) (makefile, error) {
-	parser := newParser(rd, name)
-	parser.lineno = lineno
-	parser.elineno = lineno
+func parseMakefileReader(rd io.Reader, loc srcpos) (makefile, error) {
+	parser := newParser(rd, loc.filename)
+	parser.lineno = loc.lineno
+	parser.elineno = loc.lineno
 	parser.linenoFixed = true
 	return parser.parse()
 }
 
-func parseMakefileString(s string, name string, lineno int) (makefile, error) {
-	return parseMakefileReader(strings.NewReader(s), name, lineno)
+func parseMakefileString(s string, loc srcpos) (makefile, error) {
+	return parseMakefileReader(strings.NewReader(s), loc)
 }
 
-func parseMakefileBytes(s []byte, name string, lineno int) (makefile, error) {
-	return parseMakefileReader(bytes.NewReader(s), name, lineno)
+func parseMakefileBytes(s []byte, loc srcpos) (makefile, error) {
+	return parseMakefileReader(bytes.NewReader(s), loc)
 }
 
 type mkCacheEntry struct {
diff --git a/pathutil.go b/pathutil.go
index cb5eea9..eb7c2c0 100644
--- a/pathutil.go
+++ b/pathutil.go
@@ -36,7 +36,7 @@
 	m: make(map[string][]string),
 }
 
-func wildcardGlob(pat string) []string {
+func wildcardGlob(pat string) ([]string, error) {
 	// TODO(ukai): use find cache for glob if exists.
 	pattern := filepath.Clean(pat)
 	if pattern != pat {
@@ -48,9 +48,9 @@
 			// return pat.
 			_, err := os.Stat(pat)
 			if err != nil {
-				return nil
+				return nil, nil
 			}
-			return []string{pat}
+			return []string{pat}, nil
 		}
 		if strings.Contains(pattern[i+1:], "..") {
 			// We ask shell to expand a glob to avoid this.
@@ -66,18 +66,18 @@
 			for ws.Scan() {
 				files = append(files, string(ws.Bytes()))
 			}
-			return files
+			return files, nil
 		}
 		// prefix + meta + suffix, and suffix doesn't have '..'
 		prefix := pattern[:i]
 		i = strings.IndexAny(pat, "*?[")
 		if i < 0 {
-			panic(fmt.Sprintf("wildcard metachar mismatch? pattern=%q pat=%q", pattern, pat))
+			return nil, fmt.Errorf("wildcard metachar mismatch? pattern=%q pat=%q", pattern, pat)
 		}
 		oprefix := pat[:i]
 		matched, err := filepath.Glob(pattern)
 		if err != nil {
-			panic(err)
+			return nil, err
 		}
 		var files []string
 		for _, m := range matched {
@@ -88,16 +88,12 @@
 			}
 			files = append(files, file)
 		}
-		return files
+		return files, nil
 	}
-	files, err := filepath.Glob(pat)
-	if err != nil {
-		panic(err)
-	}
-	return files
+	return filepath.Glob(pat)
 }
 
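+// wildcard expands pat into sw, reusing the cached result when UseWildcardCache is enabled.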
-func wildcard(sw *ssvWriter, pat string) {
+func wildcard(sw *ssvWriter, pat string) error {
 	if UseWildcardCache {
 		// TODO(ukai): make sure it didn't chdir?
 		wildcardCache.mu.Lock()
@@ -107,10 +103,13 @@
 			for _, file := range files {
 				sw.WriteString(file)
 			}
-			return
+			return nil
 		}
 	}
-	files := wildcardGlob(pat)
+	files, err := wildcardGlob(pat)
+	if err != nil {
+		return err
+	}
 	for _, file := range files {
 		sw.WriteString(file)
 	}
@@ -119,6 +118,7 @@
 		wildcardCache.m[pat] = files
 		wildcardCache.mu.Unlock()
 	}
+	return nil
 }
 
 type fileInfo struct {
@@ -140,6 +140,7 @@
 	androidDefaultLeafNames = []string{"CleanSpec.mk", "Android.mk"}
 )
 
+// AndroidFindCacheInit initializes the find cache for the Android build.
 func AndroidFindCacheInit(prunes, leafNames []string) {
 	if leafNames != nil {
 		androidDefaultLeafNames = leafNames
diff --git a/query.go b/query.go
index a02af99..f32bf2e 100644
--- a/query.go
+++ b/query.go
@@ -70,6 +70,7 @@
 	}
 }
 
+// Query writes the result of query q on the dependency graph g to w.
 func Query(w io.Writer, q string, g *DepGraph) {
 	if q == "$MAKEFILE_LIST" {
 		for _, mk := range g.accessedMks {
diff --git a/rule_parser.go b/rule_parser.go
index b7908ac..d71bd0f 100644
--- a/rule_parser.go
+++ b/rule_parser.go
@@ -56,6 +56,7 @@
 }
 
 type rule struct {
+	srcpos
 	outputs         []string
 	inputs          []string
 	orderOnlyInputs []string
@@ -63,11 +64,13 @@
 	isDoubleColon   bool
 	isSuffixRule    bool
 	cmds            []string
-	filename        string
-	lineno          int
 	cmdLineno       int
 }
 
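+// cmdpos returns the source position of the rule's commands.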
+func (r *rule) cmdpos() srcpos {
+	return srcpos{filename: r.filename, lineno: r.cmdLineno}
+}
+
 func isPatternRule(s []byte) (pattern, bool) {
 	i := bytes.IndexByte(s, '%')
 	if i < 0 {
@@ -93,10 +96,10 @@
 	}
 }
 
-func (r *rule) parseVar(s []byte) *assignAST {
+func (r *rule) parseVar(s []byte) (*assignAST, error) {
 	eq := bytes.IndexByte(s, '=')
 	if eq <= 0 {
-		return nil
+		return nil, nil
 	}
 	rhs := trimLeftSpaceBytes(s[eq+1:])
 	var lhs []byte
@@ -116,10 +119,12 @@
 		lhs = trimSpaceBytes(s[:eq])
 		op = "="
 	}
-	assign := newAssignAST(nil, lhs, rhs, op)
-	assign.filename = r.filename
-	assign.lineno = r.lineno
-	return assign
+	assign, err := newAssignAST(nil, lhs, rhs, op)
+	if err != nil {
+		return nil, err
+	}
+	assign.srcpos = r.srcpos
+	return assign, nil
 }
 
 func (r *rule) parse(line []byte) (*assignAST, error) {
@@ -153,7 +158,11 @@
 	}
 
 	rest := line[index:]
-	if assign := r.parseVar(rest); assign != nil {
+	assign, err := r.parseVar(rest)
+	if err != nil {
+		return nil, err
+	}
+	if assign != nil {
 		return assign, nil
 	}
 	index = bytes.IndexByte(rest, ':')
diff --git a/serialize.go b/serialize.go
index aadad3f..cb6c815 100644
--- a/serialize.go
+++ b/serialize.go
@@ -46,7 +46,10 @@
 	valueTypeTmpval    = 't'
 )
 
+// JSON is a json loader/saver.
 var JSON LoadSaver
+
+// GOB is a gob loader/saver.
 var GOB LoadSaver
 
 func init() {
@@ -57,35 +60,46 @@
 type jsonLoadSaver struct{}
 type gobLoadSaver struct{}
 
-func dumpInt(w io.Writer, i int) {
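+// dumpbuf collects serialized output; the first write error is recorded in err and later writes are skipped.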
+type dumpbuf struct {
+	w   bytes.Buffer
+	err error
+}
+
+func (d *dumpbuf) Int(i int) {
+	if d.err != nil {
+		return
+	}
 	v := int32(i)
-	err := binary.Write(w, binary.LittleEndian, &v)
-	if err != nil {
-		panic(err)
-	}
+	d.err = binary.Write(&d.w, binary.LittleEndian, &v)
 }
 
-func dumpString(w io.Writer, s string) {
-	dumpInt(w, len(s))
-	_, err := io.WriteString(w, s)
-	if err != nil {
-		panic(err)
+func (d *dumpbuf) Str(s string) {
+	if d.err != nil {
+		return
 	}
+	d.Int(len(s))
+	if d.err != nil {
+		return
+	}
+	_, d.err = io.WriteString(&d.w, s)
 }
 
-func dumpBytes(w io.Writer, b []byte) {
-	dumpInt(w, len(b))
-	_, err := w.Write(b)
-	if err != nil {
-		panic(err)
+func (d *dumpbuf) Bytes(b []byte) {
+	if d.err != nil {
+		return
 	}
+	d.Int(len(b))
+	if d.err != nil {
+		return
+	}
+	_, d.err = d.w.Write(b)
 }
 
-func dumpByte(w io.Writer, b byte) {
-	err := writeByte(w, b)
-	if err != nil {
-		panic(err)
+func (d *dumpbuf) Byte(b byte) {
+	if d.err != nil {
+		return
 	}
+	d.err = writeByte(&d.w, b)
 }
 
 type serializableVar struct {
@@ -124,21 +138,21 @@
 	Exports     map[string]bool
 }
 
-func encGob(v interface{}) string {
+func encGob(v interface{}) (string, error) {
 	var buf bytes.Buffer
 	e := gob.NewEncoder(&buf)
 	err := e.Encode(v)
 	if err != nil {
-		panic(err)
+		return "", err
 	}
-	return buf.String()
+	return buf.String(), nil
 }
 
-func encVar(k string, v Var) string {
-	var buf bytes.Buffer
-	dumpString(&buf, k)
-	v.dump(&buf)
-	return buf.String()
+func encVar(k string, v Var) (string, error) {
+	var dump dumpbuf
+	dump.Str(k)
+	v.dump(&dump)
+	return dump.w.String(), dump.err
 }
 
 type depNodesSerializer struct {
@@ -148,6 +162,7 @@
 	targets   []string
 	targetMap map[string]int
 	done      map[string]bool
+	err       error
 }
 
 func newDepNodesSerializer() *depNodesSerializer {
@@ -170,6 +185,9 @@
 }
 
 func (ns *depNodesSerializer) serializeDepNodes(nodes []*DepNode) {
+	if ns.err != nil {
+		return
+	}
 	for _, n := range nodes {
 		if ns.done[n.Output] {
 			continue
@@ -201,7 +219,11 @@
 			v := n.TargetSpecificVars[k]
 			sv := serializableTargetSpecificVar{Name: k, Value: v.serialize()}
 			//gob := encGob(sv)
-			gob := encVar(k, v)
+			gob, err := encVar(k, v)
+			if err != nil {
+				ns.err = err
+				return
+			}
 			id, present := ns.tsvMap[gob]
 			if !present {
 				id = len(ns.tsvs)
@@ -225,6 +247,9 @@
 			Lineno:             n.Lineno,
 		})
 		ns.serializeDepNodes(n.Deps)
+		if ns.err != nil {
+			return
+		}
 	}
 }
 
@@ -236,7 +261,7 @@
 	return r
 }
 
-func makeSerializableGraph(g *DepGraph, roots []string) serializableGraph {
+func makeSerializableGraph(g *DepGraph, roots []string) (serializableGraph, error) {
 	ns := newDepNodesSerializer()
 	ns.serializeDepNodes(g.nodes)
 	v := makeSerializableVars(g.vars)
@@ -248,12 +273,15 @@
 		Roots:       roots,
 		AccessedMks: g.accessedMks,
 		Exports:     g.exports,
-	}
+	}, ns.err
 }
 
 func (jsonLoadSaver) Save(g *DepGraph, filename string, roots []string) error {
 	startTime := time.Now()
-	sg := makeSerializableGraph(g, roots)
+	sg, err := makeSerializableGraph(g, roots)
+	if err != nil {
+		return err
+	}
 	o, err := json.MarshalIndent(sg, " ", " ")
 	if err != nil {
 		return err
@@ -285,12 +313,18 @@
 	var sg serializableGraph
 	{
 		startTime := time.Now()
-		sg = makeSerializableGraph(g, roots)
+		sg, err = makeSerializableGraph(g, roots)
+		if err != nil {
+			return err
+		}
 		logStats("gob serialize prepare time: %q", time.Since(startTime))
 	}
 	{
 		startTime := time.Now()
-		e.Encode(sg)
+		err = e.Encode(sg)
+		if err != nil {
+			return err
+		}
 		logStats("gob serialize output time: %q", time.Since(startTime))
 	}
 	err = f.Close()
@@ -309,9 +343,9 @@
 	return url.QueryEscape(filename)
 }
 
-func saveCache(g *DepGraph, roots []string) {
+func saveCache(g *DepGraph, roots []string) error {
 	if len(g.accessedMks) == 0 {
-		panic("No Makefile is read")
+		return fmt.Errorf("no makefile has been read")
 	}
 	cacheFile := cacheFilename(g.accessedMks[0].Filename, roots)
 	for _, mk := range g.accessedMks {
@@ -320,101 +354,164 @@
 			if exists(cacheFile) {
 				os.Remove(cacheFile)
 			}
-			return
+			return nil
 		}
 	}
-	GOB.Save(g, cacheFile, roots)
+	return GOB.Save(g, cacheFile, roots)
 }
 
-func deserializeSingleChild(sv serializableVar) Value {
+func deserializeSingleChild(sv serializableVar) (Value, error) {
 	if len(sv.Children) != 1 {
-		panic(fmt.Sprintf("unexpected number of children: %q", sv))
+		return nil, fmt.Errorf("unexpected number of children: %q", sv)
 	}
 	return deserializeVar(sv.Children[0])
 }
 
-func deserializeVar(sv serializableVar) (r Value) {
+func deserializeVar(sv serializableVar) (r Value, err error) {
 	switch sv.Type {
 	case "literal":
-		return literal(sv.V)
+		return literal(sv.V), nil
 	case "tmpval":
-		return tmpval([]byte(sv.V))
+		return tmpval([]byte(sv.V)), nil
 	case "expr":
 		var e expr
 		for _, v := range sv.Children {
-			e = append(e, deserializeVar(v))
+			dv, err := deserializeVar(v)
+			if err != nil {
+				return nil, err
+			}
+			e = append(e, dv)
 		}
-		return e
+		return e, nil
 	case "varref":
-		return &varref{varname: deserializeSingleChild(sv)}
+		dv, err := deserializeSingleChild(sv)
+		if err != nil {
+			return nil, err
+		}
+		return &varref{varname: dv}, nil
 	case "paramref":
 		v, err := strconv.Atoi(sv.V)
 		if err != nil {
-			panic(err)
+			return nil, err
 		}
-		return paramref(v)
+		return paramref(v), nil
 	case "varsubst":
-		return varsubst{
-			varname: deserializeVar(sv.Children[0]),
-			pat:     deserializeVar(sv.Children[1]),
-			subst:   deserializeVar(sv.Children[2]),
+		varname, err := deserializeVar(sv.Children[0])
+		if err != nil {
+			return nil, err
 		}
+		pat, err := deserializeVar(sv.Children[1])
+		if err != nil {
+			return nil, err
+		}
+		subst, err := deserializeVar(sv.Children[2])
+		if err != nil {
+			return nil, err
+		}
+		return varsubst{
+			varname: varname,
+			pat:     pat,
+			subst:   subst,
+		}, nil
 
 	case "func":
-		name := deserializeVar(sv.Children[0]).(literal)
+		dv, err := deserializeVar(sv.Children[0])
+		if err != nil {
+			return nil, err
+		}
+		name, ok := dv.(literal)
+		if !ok {
+			return nil, fmt.Errorf("func name is not literal %s: %T", dv, dv)
+		}
 		f := funcMap[string(name[1:])]()
 		f.AddArg(name)
 		for _, a := range sv.Children[1:] {
-			f.AddArg(deserializeVar(a))
+			dv, err := deserializeVar(a)
+			if err != nil {
+				return nil, err
+			}
+			f.AddArg(dv)
 		}
-		return f
+		return f, nil
 	case "funcEvalAssign":
+		rhs, err := deserializeVar(sv.Children[2])
+		if err != nil {
+			return nil, err
+		}
 		return &funcEvalAssign{
 			lhs: sv.Children[0].V,
 			op:  sv.Children[1].V,
-			rhs: deserializeVar(sv.Children[2]),
-		}
+			rhs: rhs,
+		}, nil
 	case "funcNop":
-		return &funcNop{expr: sv.V}
+		return &funcNop{expr: sv.V}, nil
 
 	case "simple":
 		return &simpleVar{
 			value:  sv.V,
 			origin: sv.Origin,
-		}
+		}, nil
 	case "recursive":
-		return &recursiveVar{
-			expr:   deserializeSingleChild(sv),
-			origin: sv.Origin,
+		expr, err := deserializeSingleChild(sv)
+		if err != nil {
+			return nil, err
 		}
+		return &recursiveVar{
+			expr:   expr,
+			origin: sv.Origin,
+		}, nil
 
 	case ":=", "=", "+=", "?=":
-		return &targetSpecificVar{
-			v:  deserializeSingleChild(sv).(Var),
-			op: sv.Type,
+		dv, err := deserializeSingleChild(sv)
+		if err != nil {
+			return nil, err
 		}
+		v, ok := dv.(Var)
+		if !ok {
+			return nil, fmt.Errorf("not var: target specific var %s %T", dv, dv)
+		}
+		return &targetSpecificVar{
+			v:  v,
+			op: sv.Type,
+		}, nil
 
 	default:
-		panic(fmt.Sprintf("unknown serialized variable type: %q", sv))
+		return nil, fmt.Errorf("unknown serialized variable type: %q", sv)
 	}
 }
 
-func deserializeVars(vars map[string]serializableVar) Vars {
+func deserializeVars(vars map[string]serializableVar) (Vars, error) {
 	r := make(Vars)
 	for k, v := range vars {
-		r[k] = deserializeVar(v).(Var)
+		dv, err := deserializeVar(v)
+		if err != nil {
+			return nil, err
+		}
+		vv, ok := dv.(Var)
+		if !ok {
+			return nil, fmt.Errorf("not var: %s: %T", dv, dv)
+		}
+		r[k] = vv
 	}
-	return r
+	return r, nil
 }
 
-func deserializeNodes(g serializableGraph) (r []*DepNode) {
+func deserializeNodes(g serializableGraph) (r []*DepNode, err error) {
 	nodes := g.Nodes
 	tsvs := g.Tsvs
 	targets := g.Targets
 	// Deserialize all TSVs first so that multiple rules can share memory.
 	var tsvValues []Var
 	for _, sv := range tsvs {
-		tsvValues = append(tsvValues, deserializeVar(sv.Value).(Var))
+		dv, err := deserializeVar(sv.Value)
+		if err != nil {
+			return nil, err
+		}
+		vv, ok := dv.(Var)
+		if !ok {
+			return nil, fmt.Errorf("not var: %s %T", dv, dv)
+		}
+		tsvValues = append(tsvValues, vv)
 	}
 
 	nodeMap := make(map[string]*DepNode)
@@ -450,20 +547,20 @@
 		for _, o := range n.Deps {
 			c, present := nodeMap[targets[o]]
 			if !present {
-				panic(fmt.Sprintf("unknown target: %d (%s)", o, targets[o]))
+				return nil, fmt.Errorf("unknown target: %d (%s)", o, targets[o])
 			}
 			d.Deps = append(d.Deps, c)
 		}
 		for _, o := range n.Parents {
 			c, present := nodeMap[targets[o]]
 			if !present {
-				panic(fmt.Sprintf("unknown target: %d (%s)", o, targets[o]))
+				return nil, fmt.Errorf("unknown target: %d (%s)", o, targets[o])
 			}
 			d.Parents = append(d.Parents, c)
 		}
 	}
 
-	return r
+	return r, nil
 }
 
 func human(n int) string {
@@ -576,18 +673,24 @@
 	showSerializedAccessedMksStats(g.AccessedMks)
 }
 
-func deserializeGraph(g serializableGraph) *DepGraph {
+func deserializeGraph(g serializableGraph) (*DepGraph, error) {
 	if LogFlag || StatsFlag {
 		showSerializedGraphStats(g)
 	}
-	nodes := deserializeNodes(g)
-	vars := deserializeVars(g.Vars)
+	nodes, err := deserializeNodes(g)
+	if err != nil {
+		return nil, err
+	}
+	vars, err := deserializeVars(g.Vars)
+	if err != nil {
+		return nil, err
+	}
 	return &DepGraph{
 		nodes:       nodes,
 		vars:        vars,
 		accessedMks: g.AccessedMks,
 		exports:     g.Exports,
-	}
+	}, nil
 }
 
 func (jsonLoadSaver) Load(filename string) (*DepGraph, error) {
@@ -604,7 +707,10 @@
 	if err != nil {
 		return nil, err
 	}
-	dg := deserializeGraph(g)
+	dg, err := deserializeGraph(g)
+	if err != nil {
+		return nil, err
+	}
 	logStats("gob deserialize time: %q", time.Since(startTime))
 	return dg, nil
 }
@@ -623,12 +729,15 @@
 	if err != nil {
 		return nil, err
 	}
-	dg := deserializeGraph(g)
+	dg, err := deserializeGraph(g)
+	if err != nil {
+		return nil, err
+	}
 	logStats("json deserialize time: %q", time.Since(startTime))
 	return dg, nil
 }
 
-func loadCache(makefile string, roots []string) *DepGraph {
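+// loadCache loads the cached DepGraph for makefile, or returns an error if the cache is missing or stale.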
+func loadCache(makefile string, roots []string) (*DepGraph, error) {
 	startTime := time.Now()
 	defer func() {
 		logStats("Cache lookup time: %q", time.Since(startTime))
@@ -637,37 +746,37 @@
 	filename := cacheFilename(makefile, roots)
 	if !exists(filename) {
 		logAlways("Cache not found")
-		return nil
+		return nil, fmt.Errorf("cache not found: %s", filename)
 	}
 
 	g, err := GOB.Load(filename)
 	if err != nil {
 		logAlways("Cache load error: %v", err)
-		return nil
+		return nil, err
 	}
 	for _, mk := range g.accessedMks {
 		if mk.State != fileExists && mk.State != fileNotExists {
-			panic(fmt.Sprintf("Internal error: broken state: %d", mk.State))
+			return nil, fmt.Errorf("internal error: broken state: %d", mk.State)
 		}
 		if mk.State == fileNotExists {
 			if exists(mk.Filename) {
 				logAlways("Cache expired: %s", mk.Filename)
-				return nil
+				return nil, fmt.Errorf("cache expired: %s", mk.Filename)
 			}
 		} else {
 			c, err := ioutil.ReadFile(mk.Filename)
 			if err != nil {
 				logAlways("Cache expired: %s", mk.Filename)
-				return nil
+				return nil, fmt.Errorf("cache expired: %s", mk.Filename)
 			}
 			h := sha1.Sum(c)
 			if !bytes.Equal(h[:], mk.Hash[:]) {
 				logAlways("Cache expired: %s", mk.Filename)
-				return nil
+				return nil, fmt.Errorf("cache expired: %s", mk.Filename)
 			}
 		}
 	}
 	g.isCached = true
 	logAlways("Cache found!")
-	return g
+	return g, nil
 }
diff --git a/shellutil.go b/shellutil.go
index 71a6d4e..29ce420 100644
--- a/shellutil.go
+++ b/shellutil.go
@@ -256,12 +256,16 @@
 	}
 }
 
-func (f *funcShellAndroidRot13) Eval(w io.Writer, ev *Evaluator) {
+func (f *funcShellAndroidRot13) Eval(w io.Writer, ev *Evaluator) error {
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.v)
+	fargs, err := ev.args(abuf, f.v)
+	if err != nil {
+		return err
+	}
 	rot13(fargs[0])
 	w.Write(fargs[0])
 	freeBuf(abuf)
+	return nil
 }
 
 type funcShellAndroidFindFileInDir struct {
@@ -269,24 +273,26 @@
 	dir Value
 }
 
-func (f *funcShellAndroidFindFileInDir) Eval(w io.Writer, ev *Evaluator) {
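+// Eval serves the query from the android find cache, falling back to the original shell command when the cache cannot be used.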
+func (f *funcShellAndroidFindFileInDir) Eval(w io.Writer, ev *Evaluator) error {
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.dir)
+	fargs, err := ev.args(abuf, f.dir)
+	if err != nil {
+		return err
+	}
 	dir := string(trimSpaceBytes(fargs[0]))
 	freeBuf(abuf)
 	logf("shellAndroidFindFileInDir %s => %s", f.dir.String(), dir)
 	if strings.Contains(dir, "..") {
 		logf("shellAndroidFindFileInDir contains ..: call original shell")
-		f.funcShell.Eval(w, ev)
-		return
+		return f.funcShell.Eval(w, ev)
 	}
 	if !androidFindCache.ready() {
 		logf("shellAndroidFindFileInDir androidFindCache is not ready: call original shell")
-		f.funcShell.Eval(w, ev)
-		return
+		return f.funcShell.Eval(w, ev)
 	}
 	sw := ssvWriter{w: w}
 	androidFindCache.findInDir(&sw, dir)
+	return nil
 }
 
 type funcShellAndroidFindExtFilesUnder struct {
@@ -296,9 +302,12 @@
 	ext   string
 }
 
-func (f *funcShellAndroidFindExtFilesUnder) Eval(w io.Writer, ev *Evaluator) {
+func (f *funcShellAndroidFindExtFilesUnder) Eval(w io.Writer, ev *Evaluator) error {
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.chdir, f.roots)
+	fargs, err := ev.args(abuf, f.chdir, f.roots)
+	if err != nil {
+		return err
+	}
 	chdir := string(trimSpaceBytes(fargs[0]))
 	var roots []string
 	hasDotDot := false
@@ -314,13 +323,11 @@
 	logf("shellAndroidFindExtFilesUnder %s,%s => %s,%s", f.chdir.String(), f.roots.String(), chdir, roots)
 	if strings.Contains(chdir, "..") || hasDotDot {
 		logf("shellAndroidFindExtFilesUnder contains ..: call original shell")
-		f.funcShell.Eval(w, ev)
-		return
+		return f.funcShell.Eval(w, ev)
 	}
 	if !androidFindCache.ready() {
 		logf("shellAndroidFindExtFilesUnder androidFindCache is not ready: call original shell")
-		f.funcShell.Eval(w, ev)
-		return
+		return f.funcShell.Eval(w, ev)
 	}
 	buf := newBuf()
 	sw := ssvWriter{w: buf}
@@ -328,12 +335,12 @@
 		if !androidFindCache.findExtFilesUnder(&sw, chdir, root, f.ext) {
 			freeBuf(buf)
 			logf("shellAndroidFindExtFilesUnder androidFindCache couldn't handle: call original shell")
-			f.funcShell.Eval(w, ev)
-			return
+			return f.funcShell.Eval(w, ev)
 		}
 	}
 	w.Write(buf.Bytes())
 	freeBuf(buf)
+	return nil
 }
 
 type funcShellAndroidFindJavaResourceFileGroup struct {
@@ -341,24 +348,26 @@
 	dir Value
 }
 
-func (f *funcShellAndroidFindJavaResourceFileGroup) Eval(w io.Writer, ev *Evaluator) {
+func (f *funcShellAndroidFindJavaResourceFileGroup) Eval(w io.Writer, ev *Evaluator) error {
 	abuf := newBuf()
-	fargs := ev.args(abuf, f.dir)
+	fargs, err := ev.args(abuf, f.dir)
+	if err != nil {
+		return err
+	}
 	dir := string(trimSpaceBytes(fargs[0]))
 	freeBuf(abuf)
 	logf("shellAndroidFindJavaResourceFileGroup %s => %s", f.dir.String(), dir)
 	if strings.Contains(dir, "..") {
 		logf("shellAndroidFindJavaResourceFileGroup contains ..: call original shell")
-		f.funcShell.Eval(w, ev)
-		return
+		return f.funcShell.Eval(w, ev)
 	}
 	if !androidFindCache.ready() {
 		logf("shellAndroidFindJavaResourceFileGroup androidFindCache is not ready: call original shell")
-		f.funcShell.Eval(w, ev)
-		return
+		return f.funcShell.Eval(w, ev)
 	}
 	sw := ssvWriter{w: w}
 	androidFindCache.findJavaResourceFileGroup(&sw, dir)
+	return nil
 }
 
 type funcShellAndroidFindleaves struct {
@@ -369,18 +378,20 @@
 	mindepth int
 }
 
-func (f *funcShellAndroidFindleaves) Eval(w io.Writer, ev *Evaluator) {
+func (f *funcShellAndroidFindleaves) Eval(w io.Writer, ev *Evaluator) error {
 	if !androidFindCache.leavesReady() {
 		logf("shellAndroidFindleaves androidFindCache is not ready: call original shell")
-		f.funcShell.Eval(w, ev)
-		return
+		return f.funcShell.Eval(w, ev)
 	}
 	abuf := newBuf()
 	var params []Value
 	params = append(params, f.name)
 	params = append(params, f.dirlist)
 	params = append(params, f.prunes...)
-	fargs := ev.args(abuf, params...)
+	fargs, err := ev.args(abuf, params...)
+	if err != nil {
+		return err
+	}
 	name := string(trimSpaceBytes(fargs[0]))
 	var dirs []string
 	ws := newWordScanner(fargs[1])
@@ -388,8 +399,7 @@
 		dir := string(ws.Bytes())
 		if strings.Contains(dir, "..") {
 			logf("shellAndroidFindleaves contains .. in %s: call original shell", dir)
-			f.funcShell.Eval(w, ev)
-			return
+			return f.funcShell.Eval(w, ev)
 		}
 		dirs = append(dirs, dir)
 	}
@@ -403,9 +413,11 @@
 	for _, dir := range dirs {
 		androidFindCache.findleaves(&sw, dir, name, prunes, f.mindepth)
 	}
+	return nil
 }
 
 var (
+	// ShellDateTimestamp is a timestamp used for $(shell date).
 	ShellDateTimestamp time.Time
 	shellDateFormatRef = map[string]string{
 		"%Y": "2006",
@@ -442,6 +454,7 @@
 	}
 }
 
-func (f *funcShellDate) Eval(w io.Writer, ev *Evaluator) {
+func (f *funcShellDate) Eval(w io.Writer, ev *Evaluator) error {
 	fmt.Fprint(w, ShellDateTimestamp.Format(f.format))
+	return nil
 }
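
Each of the funcShellAndroid* fast paths now follows the same shape: evaluate the arguments with ev.args, return early on error, and delegate to the wrapped funcShell when the input contains ".." or the find cache is not ready. A stripped-down sketch of that shape (funcShellAndroidExample and its findInDir call are placeholders standing in for the concrete helpers above):

// Sketch of the common pattern shared by the optimized $(shell find ...) helpers.
func (f *funcShellAndroidExample) Eval(w io.Writer, ev *Evaluator) error {
	abuf := newBuf()
	fargs, err := ev.args(abuf, f.dir)
	if err != nil {
		return err
	}
	dir := string(trimSpaceBytes(fargs[0]))
	freeBuf(abuf)
	if strings.Contains(dir, "..") || !androidFindCache.ready() {
		// Fall back to spawning the real shell command.
		return f.funcShell.Eval(w, ev)
	}
	sw := ssvWriter{w: w}
	androidFindCache.findInDir(&sw, dir)
	return nil
}
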
diff --git a/stats.go b/stats.go
index 6535869..2557888 100644
--- a/stats.go
+++ b/stats.go
@@ -39,10 +39,12 @@
 
 var traceEvent traceEventT
 
+// TraceEventStart starts collecting trace events, writing them to f.
 func TraceEventStart(f io.WriteCloser) {
 	traceEvent.start(f)
 }
 
+// TraceEventStop stops collecting trace events.
 func TraceEventStop() {
 	traceEvent.stop()
 }
@@ -147,6 +149,7 @@
 	s.mu.Unlock()
 }
 
+// DumpStats dumps the collected statistics if EvalStatsFlag is set.
 func DumpStats() {
 	if !EvalStatsFlag {
 		return
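
TraceEventStart and TraceEventStop bracket a run: the caller hands over any io.WriteCloser and stops tracing once evaluation is done. A minimal hypothetical usage (the output filename is arbitrary):

// Sketch: enable trace events for the duration of a run.
f, err := os.Create("kati_trace.out") // assumed output path
if err != nil {
	return err
}
TraceEventStart(f)
defer TraceEventStop()
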
diff --git a/testcase/gen_testcase_parse_benchmark.go b/testcase/gen_testcase_parse_benchmark.go
index a1d58ee..9e8c8ba 100644
--- a/testcase/gen_testcase_parse_benchmark.go
+++ b/testcase/gen_testcase_parse_benchmark.go
@@ -45,7 +45,10 @@
 	mk := string(data)
 	b.ResetTimer()
 	for i := 0; i < b.N; i++ {
-		parseMakefileString(mk, {{.Filename | printf "%q"}}, 0)
+		parseMakefileString(mk, srcpos{
+			filename: {{.Filename | printf "%q"}},
+			lineno: 0,
+		})
 	}
 }
 `))
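
The generated benchmark now passes a srcpos value instead of a separate filename and line number, so parse errors can be attributed to a location. A sketch of how such a position type can carry location into errors (the field names match the template above; the errorf helper and its message format are assumptions about this codebase, not shown in this patch):

// Sketch: a source position that can prefix errors with "file:line:".
type srcpos struct {
	filename string
	lineno   int
}

func (p srcpos) errorf(format string, args ...interface{}) error {
	return fmt.Errorf("%s:%d: %s", p.filename, p.lineno, fmt.Sprintf(format, args...))
}
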
diff --git a/var.go b/var.go
index f952e6e..d8f4c19 100644
--- a/var.go
+++ b/var.go
@@ -20,10 +20,11 @@
 	"io"
 )
 
+// Var is an interface for a make variable.
 type Var interface {
 	Value
-	Append(*Evaluator, string) Var
-	AppendVar(*Evaluator, Value) Var
+	Append(*Evaluator, string) (Var, error)
+	AppendVar(*Evaluator, Value) (Var, error)
 	Flavor() string
 	Origin() string
 	IsDefined() bool
@@ -34,17 +35,25 @@
 	op string
 }
 
-func (v *targetSpecificVar) Append(ev *Evaluator, s string) Var {
-	return &targetSpecificVar{
-		v:  v.v.Append(ev, s),
-		op: v.op,
+func (v *targetSpecificVar) Append(ev *Evaluator, s string) (Var, error) {
+	nv, err := v.v.Append(ev, s)
+	if err != nil {
+		return nil, err
 	}
+	return &targetSpecificVar{
+		v:  nv,
+		op: v.op,
+	}, nil
 }
-func (v *targetSpecificVar) AppendVar(ev *Evaluator, v2 Value) Var {
-	return &targetSpecificVar{
-		v:  v.v.AppendVar(ev, v2),
-		op: v.op,
+func (v *targetSpecificVar) AppendVar(ev *Evaluator, v2 Value) (Var, error) {
+	nv, err := v.v.AppendVar(ev, v2)
+	if err != nil {
+		return nil, err
 	}
+	return &targetSpecificVar{
+		v:  nv,
+		op: v.op,
+	}, nil
 }
 func (v *targetSpecificVar) Flavor() string {
 	return v.v.Flavor()
@@ -61,8 +70,8 @@
 	return v.v.String()
 	// return v.v.String() + " (op=" + v.op + ")"
 }
-func (v *targetSpecificVar) Eval(w io.Writer, ev *Evaluator) {
-	v.v.Eval(w, ev)
+func (v *targetSpecificVar) Eval(w io.Writer, ev *Evaluator) error {
+	return v.v.Eval(w, ev)
 }
 
 func (v *targetSpecificVar) serialize() serializableVar {
@@ -72,10 +81,10 @@
 	}
 }
 
-func (v *targetSpecificVar) dump(w io.Writer) {
-	dumpByte(w, valueTypeTSV)
-	dumpString(w, v.op)
-	v.v.dump(w)
+func (v *targetSpecificVar) dump(d *dumpbuf) {
+	d.Byte(valueTypeTSV)
+	d.Str(v.op)
+	v.v.dump(d)
 }
 
 type simpleVar struct {
@@ -88,8 +97,9 @@
 func (v *simpleVar) IsDefined() bool { return true }
 
 func (v *simpleVar) String() string { return v.value }
-func (v *simpleVar) Eval(w io.Writer, ev *Evaluator) {
+func (v *simpleVar) Eval(w io.Writer, ev *Evaluator) error {
 	io.WriteString(w, v.value)
+	return nil
 }
 func (v *simpleVar) serialize() serializableVar {
 	return serializableVar{
@@ -98,34 +108,40 @@
 		Origin: v.origin,
 	}
 }
-func (v *simpleVar) dump(w io.Writer) {
-	dumpByte(w, valueTypeSimple)
-	dumpString(w, v.value)
-	dumpString(w, v.origin)
+func (v *simpleVar) dump(d *dumpbuf) {
+	d.Byte(valueTypeSimple)
+	d.Str(v.value)
+	d.Str(v.origin)
 }
 
-func (v *simpleVar) Append(ev *Evaluator, s string) Var {
+func (v *simpleVar) Append(ev *Evaluator, s string) (Var, error) {
 	val, _, err := parseExpr([]byte(s), nil, false)
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	abuf := newBuf()
 	io.WriteString(abuf, v.value)
 	writeByte(abuf, ' ')
-	val.Eval(abuf, ev)
+	err = val.Eval(abuf, ev)
+	if err != nil {
+		return nil, err
+	}
 	v.value = abuf.String()
 	freeBuf(abuf)
-	return v
+	return v, nil
 }
 
-func (v *simpleVar) AppendVar(ev *Evaluator, val Value) Var {
+func (v *simpleVar) AppendVar(ev *Evaluator, val Value) (Var, error) {
 	abuf := newBuf()
 	io.WriteString(abuf, v.value)
 	writeByte(abuf, ' ')
-	val.Eval(abuf, ev)
+	err := val.Eval(abuf, ev)
+	if err != nil {
+		return nil, err
+	}
 	v.value = abuf.String()
 	freeBuf(abuf)
-	return v
+	return v, nil
 }
 
 type automaticVar struct {
@@ -137,34 +153,41 @@
 func (v *automaticVar) IsDefined() bool { return true }
 
 func (v *automaticVar) String() string { return string(v.value) }
-func (v *automaticVar) Eval(w io.Writer, ev *Evaluator) {
+func (v *automaticVar) Eval(w io.Writer, ev *Evaluator) error {
 	w.Write(v.value)
+	return nil
 }
 func (v *automaticVar) serialize() serializableVar {
-	panic(fmt.Sprintf("cannnot serialize automatic var:%s", v.value))
+	return serializableVar{Type: ""}
 }
-func (v *automaticVar) dump(w io.Writer) {
-	panic(fmt.Sprintf("cannnot dump automatic var:%s", v.value))
+func (v *automaticVar) dump(d *dumpbuf) {
+	d.err = fmt.Errorf("cannnot dump automatic var:%s", v.value)
 }
 
-func (v *automaticVar) Append(ev *Evaluator, s string) Var {
+func (v *automaticVar) Append(ev *Evaluator, s string) (Var, error) {
 	val, _, err := parseExpr([]byte(s), nil, false)
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	buf := bytes.NewBuffer(v.value)
 	buf.WriteByte(' ')
-	val.Eval(buf, ev)
+	err = val.Eval(buf, ev)
+	if err != nil {
+		return nil, err
+	}
 	v.value = buf.Bytes()
-	return v
+	return v, nil
 }
 
-func (v *automaticVar) AppendVar(ev *Evaluator, val Value) Var {
+func (v *automaticVar) AppendVar(ev *Evaluator, val Value) (Var, error) {
 	buf := bytes.NewBuffer(v.value)
 	buf.WriteByte(' ')
-	val.Eval(buf, ev)
+	err := val.Eval(buf, ev)
+	if err != nil {
+		return nil, err
+	}
 	v.value = buf.Bytes()
-	return v
+	return v, nil
 }
 
 type recursiveVar struct {
@@ -177,8 +200,8 @@
 func (v *recursiveVar) IsDefined() bool { return true }
 
 func (v *recursiveVar) String() string { return v.expr.String() }
-func (v *recursiveVar) Eval(w io.Writer, ev *Evaluator) {
+func (v *recursiveVar) Eval(w io.Writer, ev *Evaluator) error {
-	v.expr.Eval(w, ev)
+	return v.expr.Eval(w, ev)
 }
 func (v *recursiveVar) serialize() serializableVar {
 	return serializableVar{
@@ -187,13 +211,13 @@
 		Origin:   v.origin,
 	}
 }
-func (v *recursiveVar) dump(w io.Writer) {
-	dumpByte(w, valueTypeRecursive)
-	v.expr.dump(w)
-	dumpString(w, v.origin)
+func (v *recursiveVar) dump(d *dumpbuf) {
+	d.Byte(valueTypeRecursive)
+	v.expr.dump(d)
+	d.Str(v.origin)
 }
 
-func (v *recursiveVar) Append(_ *Evaluator, s string) Var {
+func (v *recursiveVar) Append(_ *Evaluator, s string) (Var, error) {
 	var exp expr
 	if e, ok := v.expr.(expr); ok {
 		exp = append(e, literal(" "))
@@ -202,7 +226,7 @@
 	}
 	sv, _, err := parseExpr([]byte(s), nil, true)
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	if aexpr, ok := sv.(expr); ok {
 		exp = append(exp, aexpr...)
@@ -210,20 +234,20 @@
 		exp = append(exp, sv)
 	}
 	v.expr = exp
-	return v
+	return v, nil
 }
 
-func (v *recursiveVar) AppendVar(ev *Evaluator, val Value) Var {
+func (v *recursiveVar) AppendVar(ev *Evaluator, val Value) (Var, error) {
 	var buf bytes.Buffer
 	buf.WriteString(v.expr.String())
 	buf.WriteByte(' ')
 	buf.WriteString(val.String())
 	e, _, err := parseExpr(buf.Bytes(), nil, true)
 	if err != nil {
-		panic(err)
+		return nil, err
 	}
 	v.expr = e
-	return v
+	return v, nil
 }
 
 type undefinedVar struct{}
@@ -232,25 +256,28 @@
 func (undefinedVar) Origin() string  { return "undefined" }
 func (undefinedVar) IsDefined() bool { return false }
 func (undefinedVar) String() string  { return "" }
-func (undefinedVar) Eval(_ io.Writer, _ *Evaluator) {
+func (undefinedVar) Eval(_ io.Writer, _ *Evaluator) error {
+	return nil
 }
 func (undefinedVar) serialize() serializableVar {
 	return serializableVar{Type: "undefined"}
 }
-func (undefinedVar) dump(w io.Writer) {
-	dumpByte(w, valueTypeUndefined)
+func (undefinedVar) dump(d *dumpbuf) {
+	d.Byte(valueTypeUndefined)
 }
 
-func (undefinedVar) Append(*Evaluator, string) Var {
-	return undefinedVar{}
+func (undefinedVar) Append(*Evaluator, string) (Var, error) {
+	return undefinedVar{}, nil
 }
 
-func (undefinedVar) AppendVar(_ *Evaluator, val Value) Var {
-	return undefinedVar{}
+func (undefinedVar) AppendVar(_ *Evaluator, val Value) (Var, error) {
+	return undefinedVar{}, nil
 }
 
+// Vars is a map of make variables.
 type Vars map[string]Var
 
+// Lookup looks up the named make variable.
 func (vt Vars) Lookup(name string) Var {
 	if v, ok := vt[name]; ok {
 		return v
@@ -258,6 +285,7 @@
 	return undefinedVar{}
 }
 
+// Assign assigns v to name.
 func (vt Vars) Assign(name string, v Var) {
 	switch v.Origin() {
 	case "override", "environment override":
@@ -271,12 +299,14 @@
 	vt[name] = v
 }
 
+// NewVars creates a new Vars populated from vt.
 func NewVars(vt Vars) Vars {
 	r := make(Vars)
 	r.Merge(vt)
 	return r
 }
 
+// Merge merges vt2 into vt.
 func (vt Vars) Merge(vt2 Vars) {
 	for k, v := range vt2 {
 		vt[k] = v
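
Append and AppendVar now surface parse and evaluation errors to the caller instead of panicking inside the variable implementation. A hypothetical call site, given an *Evaluator ev in scope (the variable name and values are illustrative only):

// Sketch: append to a variable held in a Vars map and store the result.
vars := NewVars(nil)
vars.Assign("CFLAGS", &simpleVar{value: "-Wall", origin: "file"})
nv, err := vars.Lookup("CFLAGS").Append(ev, "-O2 $(EXTRA)")
if err != nil {
	return err
}
vars.Assign("CFLAGS", nv)
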
diff --git a/worker.go b/worker.go
index 8948ac6..9d3fd09 100644
--- a/worker.go
+++ b/worker.go
@@ -45,8 +45,9 @@
 }
 
 type jobResult struct {
-	j *job
-	w *worker
+	j   *job
+	w   *worker
+	err error
 }
 
 type newDep struct {
@@ -99,8 +100,8 @@
 	for !done {
 		select {
 		case j := <-w.jobChan:
-			j.build()
-			w.wm.ReportResult(w, j)
+			err := j.build()
+			w.wm.ReportResult(w, j, err)
 		case done = <-w.waitChan:
 		}
 	}
@@ -116,19 +117,22 @@
 	<-w.doneChan
 }
 
-func evalCmd(ev *Evaluator, r runner, s string) []runner {
+func evalCmd(ev *Evaluator, r runner, s string) ([]runner, error) {
 	r = newRunner(r, s)
 	if strings.IndexByte(r.cmd, '$') < 0 {
 		// fast path
-		return []runner{r}
+		return []runner{r}, nil
 	}
 	// TODO(ukai): parse once more earlier?
 	expr, _, err := parseExpr([]byte(r.cmd), nil, false)
 	if err != nil {
-		panic(fmt.Errorf("parse cmd %q: %v", r.cmd, err))
+		return nil, ev.errorf("parse cmd %q: %v", r.cmd, err)
 	}
 	buf := newBuf()
-	expr.Eval(buf, ev)
+	err = expr.Eval(buf, ev)
+	if err != nil {
+		return nil, err
+	}
 	cmds := buf.String()
 	freeBuf(buf)
 	var runners []runner
@@ -140,7 +144,7 @@
 			runners = append(runners, newRunner(r, cmd))
 		}
 	}
-	return runners
+	return runners, nil
 }
 
 func newRunner(r runner, s string) runner {
@@ -189,9 +193,9 @@
 	return err
 }
 
-func (j *job) createRunners() []runner {
-	runners, _ := j.ex.createRunners(j.n, false)
-	return runners
+func (j *job) createRunners() ([]runner, error) {
+	runners, _, err := j.ex.createRunners(j.n, false)
+	return runners, err
 }
 
 // TODO(ukai): use time.Time?
@@ -203,7 +207,7 @@
 	return st.ModTime().Unix()
 }
 
-func (j *job) build() {
+func (j *job) build() error {
 	if j.n.IsPhony {
 		j.outputTs = -2 // trigger cmd even if all inputs don't exist.
 	} else {
@@ -212,26 +216,28 @@
 
 	if !j.n.HasRule {
 		if j.outputTs >= 0 || j.n.IsPhony {
-			return
+			return nil
 		}
 		if len(j.parents) == 0 {
-			errorNoLocationExit("*** No rule to make target %q.", j.n.Output)
-		} else {
-			errorNoLocationExit("*** No rule to make target %q, needed by %q.", j.n.Output, j.parents[0].n.Output)
+			return fmt.Errorf("*** No rule to make target %q.", j.n.Output)
 		}
-		errorNoLocationExit("no rule to make target %q", j.n.Output)
+		return fmt.Errorf("*** No rule to make target %q, needed by %q.", j.n.Output, j.parents[0].n.Output)
 	}
 
 	if j.outputTs >= j.depsTs {
 		// TODO: stats.
-		return
+		return nil
 	}
 
-	for _, r := range j.createRunners() {
+	rr, err := j.createRunners()
+	if err != nil {
+		return err
+	}
+	for _, r := range rr {
 		err := r.run(j.n.Output)
 		if err != nil {
 			exit := exitStatus(err)
-			errorNoLocationExit("[%s] Error %d: %v", j.n.Output, exit, err)
+			return fmt.Errorf("[%s] Error %d: %v", j.n.Output, exit, err)
 		}
 	}
 
@@ -243,21 +249,26 @@
 			j.outputTs = time.Now().Unix()
 		}
 	}
+	return nil
 }
 
-func (wm *workerManager) handleJobs() {
+func (wm *workerManager) handleJobs() error {
 	for {
 		if wm.para == nil && len(wm.freeWorkers) == 0 {
-			return
+			return nil
 		}
 		if wm.readyQueue.Len() == 0 {
-			return
+			return nil
 		}
 		j := heap.Pop(&wm.readyQueue).(*job)
 		logf("run: %s", j.n.Output)
 
 		if wm.para != nil {
-			j.runners = j.createRunners()
+			var err error
+			j.runners, err = j.createRunners()
+			if err != nil {
+				return err
+			}
 			if len(j.runners) == 0 {
 				wm.updateParents(j)
 				wm.finishCnt++
@@ -294,7 +305,7 @@
 	resultChan  chan jobResult
 	newDepChan  chan newDep
 	waitChan    chan bool
-	doneChan    chan bool
+	doneChan    chan error
 	freeWorkers []*worker
 	busyWorkers map[*worker]bool
 	ex          *Executor
@@ -305,21 +316,25 @@
 	finishCnt int
 }
 
-func newWorkerManager(numJobs int, paraPath string) *workerManager {
+func newWorkerManager(numJobs int, paraPath string) (*workerManager, error) {
 	wm := &workerManager{
 		maxJobs:     numJobs,
 		jobChan:     make(chan *job),
 		resultChan:  make(chan jobResult),
 		newDepChan:  make(chan newDep),
 		waitChan:    make(chan bool),
-		doneChan:    make(chan bool),
+		doneChan:    make(chan error),
 		busyWorkers: make(map[*worker]bool),
 	}
 
 	if paraPath != "" {
 		wm.runnings = make(map[string]*job)
 		wm.paraChan = make(chan *paraResult)
-		wm.para = newParaWorker(wm.paraChan, numJobs, paraPath)
+		var err error
+		wm.para, err = newParaWorker(wm.paraChan, numJobs, paraPath)
+		if err != nil {
+			return nil, err
+		}
 		go wm.para.Run()
 	} else {
 		wm.busyWorkers = make(map[*worker]bool)
@@ -331,7 +346,7 @@
 	}
 	heap.Init(&wm.readyQueue)
 	go wm.Run()
-	return wm
+	return wm, nil
 }
 
 func exitStatus(err error) int {
@@ -363,7 +378,7 @@
 	if j.numDeps < 0 {
 		neededBy.numDeps--
 		if neededBy.id > 0 {
-			panic("already in WM... can this happen?")
+			panic("FIXME: already in WM... can this happen?")
 		}
 	} else {
 		j.parents = append(j.parents, neededBy)
@@ -372,6 +387,8 @@
 
 func (wm *workerManager) Run() {
 	done := false
+	var err error
+Loop:
 	for wm.hasTodo() || len(wm.busyWorkers) > 0 || len(wm.runnings) > 0 || !done {
 		select {
 		case j := <-wm.jobChan:
@@ -385,6 +402,10 @@
 			wm.freeWorkers = append(wm.freeWorkers, jr.w)
 			wm.updateParents(jr.j)
 			wm.finishCnt++
+			if jr.err != nil {
+				err = jr.err
+				break Loop
+			}
 		case af := <-wm.newDepChan:
 			wm.handleNewDep(af.j, af.neededBy)
 			logf("dep: %s (%d) %s", af.neededBy.n.Output, af.neededBy.numDeps, af.j.n.Output)
@@ -406,7 +427,10 @@
 			}
 		case done = <-wm.waitChan:
 		}
-		wm.handleJobs()
+		err = wm.handleJobs()
+		if err != nil {
+			break Loop
+		}
 
 		if wm.para != nil {
 			numBusy := len(wm.runnings)
@@ -421,7 +445,10 @@
 
 	if wm.para != nil {
 		logf("Wait for para to finish")
-		wm.para.Wait()
+		err := wm.para.Wait()
+		if err != nil {
+			logf("para failed: %v", err)
+		}
 	} else {
 		for _, w := range wm.freeWorkers {
 			w.Wait()
@@ -430,22 +457,22 @@
 			w.Wait()
 		}
 	}
-	wm.doneChan <- true
+	wm.doneChan <- err
 }
 
 func (wm *workerManager) PostJob(j *job) {
 	wm.jobChan <- j
 }
 
-func (wm *workerManager) ReportResult(w *worker, j *job) {
-	wm.resultChan <- jobResult{w: w, j: j}
+func (wm *workerManager) ReportResult(w *worker, j *job, err error) {
+	wm.resultChan <- jobResult{w: w, j: j, err: err}
 }
 
 func (wm *workerManager) ReportNewDep(j *job, neededBy *job) {
 	wm.newDepChan <- newDep{j: j, neededBy: neededBy}
 }
 
-func (wm *workerManager) Wait() {
+func (wm *workerManager) Wait() error {
 	wm.waitChan <- true
-	<-wm.doneChan
+	return <-wm.doneChan
 }