aboutsummaryrefslogtreecommitdiffstats
path: root/golang/kati
diff options
context:
space:
mode:
authorDan Willemsen <dwillemsen@google.com>2020-06-26 19:20:26 -0700
committerGitHub <noreply@github.com>2020-06-26 19:20:26 -0700
commit10cc982b563c19890872b73e6d8fb44aeda646ae (patch)
tree6b5075e832cbdf2a7996a25a26659363527b6e4c /golang/kati
parent003cf51e9b6da48063c90cf4c6710fde103c9c4a (diff)
parent979e7ae6e417ae4ee45e835104b66191ae16a14c (diff)
downloadplatform_build_kati-10cc982b563c19890872b73e6d8fb44aeda646ae.tar.gz
platform_build_kati-10cc982b563c19890872b73e6d8fb44aeda646ae.tar.bz2
platform_build_kati-10cc982b563c19890872b73e6d8fb44aeda646ae.zip
Merge pull request #199 from danw/refactor
Refactor source tree into directories
Diffstat (limited to 'golang/kati')
-rw-r--r--golang/kati/ast.go178
-rw-r--r--golang/kati/bootstrap.go53
-rw-r--r--golang/kati/buf.go210
-rw-r--r--golang/kati/buf_test.go69
-rw-r--r--golang/kati/dep.go596
-rw-r--r--golang/kati/depgraph.go232
-rw-r--r--golang/kati/doc.go22
-rw-r--r--golang/kati/eval.go727
-rw-r--r--golang/kati/evalcmd.go369
-rw-r--r--golang/kati/exec.go203
-rw-r--r--golang/kati/expr.go767
-rw-r--r--golang/kati/expr_test.go308
-rw-r--r--golang/kati/fileutil.go62
-rw-r--r--golang/kati/flags.go29
-rw-r--r--golang/kati/func.go1464
-rw-r--r--golang/kati/func_test.go76
-rw-r--r--golang/kati/log.go39
-rw-r--r--golang/kati/ninja.go777
-rw-r--r--golang/kati/ninja_test.go202
-rw-r--r--golang/kati/parser.go821
-rw-r--r--golang/kati/pathutil.go945
-rw-r--r--golang/kati/pathutil_test.go800
-rw-r--r--golang/kati/query.go99
-rw-r--r--golang/kati/rule_parser.go267
-rw-r--r--golang/kati/rule_parser_test.go223
-rw-r--r--golang/kati/serialize.go796
-rw-r--r--golang/kati/shellutil.go232
-rw-r--r--golang/kati/shellutil_test.go102
-rw-r--r--golang/kati/stats.go200
-rw-r--r--golang/kati/strutil.go430
-rw-r--r--golang/kati/strutil_test.go253
-rw-r--r--golang/kati/symtab.go42
-rw-r--r--golang/kati/var.go371
-rw-r--r--golang/kati/version.go17
-rw-r--r--golang/kati/worker.go368
35 files changed, 12349 insertions, 0 deletions
diff --git a/golang/kati/ast.go b/golang/kati/ast.go
new file mode 100644
index 0000000..2b43176
--- /dev/null
+++ b/golang/kati/ast.go
@@ -0,0 +1,178 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "strings"
+
+ "github.com/golang/glog"
+)
+
+// ast is one parsed makefile statement. eval applies the statement's
+// effect to the given Evaluator; show logs the statement for debugging.
+type ast interface {
+ eval(*Evaluator) error
+ show()
+}
+
+// assignAST is a variable assignment line (lhs op rhs), e.g. "FOO := bar".
+type assignAST struct {
+ srcpos
+ lhs Value
+ rhs Value
+ op string
+ opt string // "override", "export"
+}
+
+func (ast *assignAST) eval(ev *Evaluator) error {
+ return ev.evalAssign(ast)
+}
+
+// evalRHS builds the Var to be bound to |lhs| according to the assignment
+// operator: ":=" evaluates eagerly into a simpleVar, "=" defers evaluation
+// in a recursiveVar, "+=" appends to an existing var (or behaves like "="
+// when undefined), and "?=" keeps an existing definition untouched.
+// The var's origin is "default" for the bootstrap makefile, "override"
+// when the override directive was used, and "file" otherwise.
+func (ast *assignAST) evalRHS(ev *Evaluator, lhs string) (Var, error) {
+ origin := "file"
+ if ast.filename == bootstrapMakefileName {
+ origin = "default"
+ }
+ if ast.opt == "override" {
+ origin = "override"
+ }
+ // TODO(ukai): handle ast.opt == "export"
+ switch ast.op {
+ case ":=":
+ // literal/tmpval need no evaluation; anything else is expanded now.
+ switch v := ast.rhs.(type) {
+ case literal:
+ return &simpleVar{value: []string{v.String()}, origin: origin}, nil
+ case tmpval:
+ return &simpleVar{value: []string{v.String()}, origin: origin}, nil
+ default:
+ var buf evalBuffer
+ buf.resetSep()
+ err := v.Eval(&buf, ev)
+ if err != nil {
+ return nil, err
+ }
+ return &simpleVar{value: []string{buf.String()}, origin: origin}, nil
+ }
+ case "=":
+ return &recursiveVar{expr: ast.rhs, origin: origin}, nil
+ case "+=":
+ prev := ev.lookupVarInCurrentScope(lhs)
+ if !prev.IsDefined() {
+ return &recursiveVar{expr: ast.rhs, origin: origin}, nil
+ }
+ return prev.AppendVar(ev, ast.rhs)
+ case "?=":
+ prev := ev.lookupVarInCurrentScope(lhs)
+ if prev.IsDefined() {
+ return prev, nil
+ }
+ return &recursiveVar{expr: ast.rhs, origin: origin}, nil
+ }
+ return nil, ast.errorf("unknown assign op: %q", ast.op)
+}
+
+func (ast *assignAST) show() {
+ glog.Infof("%s %s %s %q", ast.opt, ast.lhs, ast.op, ast.rhs)
+}
+
+// maybeRuleAST is an ast for rule line.
+// Note we cannot be sure what this is, until all variables in |expr|
+// are expanded.
+type maybeRuleAST struct {
+ srcpos
+ isRule bool // found literal ':'
+ expr Value
+ assign *assignAST // target specific var
+ semi []byte // after ';' if ';' exists
+}
+
+func (ast *maybeRuleAST) eval(ev *Evaluator) error {
+ return ev.evalMaybeRule(ast)
+}
+
+func (ast *maybeRuleAST) show() {
+ glog.Info(ast.expr)
+}
+
+// commandAST is a recipe line (tab-prefixed command) belonging to a rule.
+type commandAST struct {
+ srcpos
+ cmd string
+}
+
+func (ast *commandAST) eval(ev *Evaluator) error {
+ return ev.evalCommand(ast)
+}
+
+func (ast *commandAST) show() {
+ // Escape newlines so multi-line commands log as a single line.
+ glog.Infof("\t%s", strings.Replace(ast.cmd, "\n", `\n`, -1))
+}
+
+// includeAST is an "include"/"-include" directive; op holds the directive
+// spelling and expr the unexpanded file list.
+type includeAST struct {
+ srcpos
+ expr string
+ op string
+}
+
+func (ast *includeAST) eval(ev *Evaluator) error {
+ return ev.evalInclude(ast)
+}
+
+func (ast *includeAST) show() {
+ glog.Infof("include %s", ast.expr)
+}
+
+// ifAST is a conditional block (ifeq/ifneq/ifdef/ifndef) with its two
+// statement branches.
+type ifAST struct {
+ srcpos
+ op string
+ lhs Value
+ rhs Value // Empty if |op| is ifdef or ifndef.
+ trueStmts []ast
+ falseStmts []ast
+}
+
+func (ast *ifAST) eval(ev *Evaluator) error {
+ return ev.evalIf(ast)
+}
+
+func (ast *ifAST) show() {
+ // TODO
+ glog.Info("if")
+}
+
+// exportAST is an "export"/"unexport" directive; export distinguishes the
+// two, and hasEqual records whether the line also contained an assignment.
+type exportAST struct {
+ srcpos
+ expr []byte
+ hasEqual bool
+ export bool
+}
+
+func (ast *exportAST) eval(ev *Evaluator) error {
+ return ev.evalExport(ast)
+}
+
+func (ast *exportAST) show() {
+ // TODO
+ glog.Info("export")
+}
+
+// vpathAST is a "vpath" directive; expr is the unexpanded pattern/path list.
+type vpathAST struct {
+ srcpos
+ expr Value
+}
+
+func (ast *vpathAST) eval(ev *Evaluator) error {
+ return ev.evalVpath(ast)
+}
+
+func (ast *vpathAST) show() {
+ glog.Infof("vpath %s", ast.expr.String())
+}
diff --git a/golang/kati/bootstrap.go b/golang/kati/bootstrap.go
new file mode 100644
index 0000000..9ef396a
--- /dev/null
+++ b/golang/kati/bootstrap.go
@@ -0,0 +1,53 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+ "path/filepath"
+ "strings"
+)
+
+// bootstrapMakefileName is the pseudo-filename recorded as srcpos for the
+// built-in bootstrap rules; assignments from it get origin "default".
+const bootstrapMakefileName = "*bootstrap*"
+
+// bootstrapMakefile parses the built-in variable defaults (CC, MAKE,
+// MAKE_VERSION, ...) and built-in suffix rules, plus MAKECMDGOALS from
+// |targets| and CURDIR from the current working directory, into a makefile.
+func bootstrapMakefile(targets []string) (makefile, error) {
+ bootstrap := `
+CC?=cc
+CXX?=g++
+AR?=ar
+MAKE?=kati
+# Pretend to be GNU make 3.81, for compatibility.
+MAKE_VERSION?=3.81
+KATI?=kati
+SHELL=/bin/sh
+# TODO: Add more builtin vars.
+
+# http://www.gnu.org/software/make/manual/make.html#Catalogue-of-Rules
+# The document above is actually not correct. See default.c:
+# http://git.savannah.gnu.org/cgit/make.git/tree/default.c?id=4.1
+.c.o:
+	$(CC) $(CFLAGS) $(CPPFLAGS) $(TARGET_ARCH) -c -o $@ $<
+.cc.o:
+	$(CXX) $(CXXFLAGS) $(CPPFLAGS) $(TARGET_ARCH) -c -o $@ $<
+# TODO: Add more builtin rules.
+`
+ bootstrap += fmt.Sprintf("MAKECMDGOALS:=%s\n", strings.Join(targets, " "))
+ cwd, err := filepath.Abs(".")
+ if err != nil {
+ return makefile{}, err
+ }
+ bootstrap += fmt.Sprintf("CURDIR:=%s\n", cwd)
+ return parseMakefileString(bootstrap, srcpos{bootstrapMakefileName, 0})
+}
diff --git a/golang/kati/buf.go b/golang/kati/buf.go
new file mode 100644
index 0000000..737e7c9
--- /dev/null
+++ b/golang/kati/buf.go
@@ -0,0 +1,210 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "io"
+ "sync"
+)
+
+// Free lists for evalBuffer/wordBuffer; see newEbuf/newWbuf and release.
+var (
+ ebufFree = sync.Pool{
+ New: func() interface{} { return new(evalBuffer) },
+ }
+ wbufFree = sync.Pool{
+ New: func() interface{} { return new(wordBuffer) },
+ }
+)
+
+// writeByte writes a single byte, using io.ByteWriter when the writer
+// supports it to avoid allocating a one-byte slice.
+func writeByte(w io.Writer, b byte) error {
+ if bw, ok := w.(io.ByteWriter); ok {
+ return bw.WriteByte(b)
+ }
+ _, err := w.Write([]byte{b})
+ return err
+}
+
+// use io.WriteString to stringWrite.
+
+// ssvWriter writes space-separated words to the underlying writer; sep
+// tracks whether a separating space is due before the next word.
+type ssvWriter struct {
+ io.Writer
+ sep bool
+}
+
+func (w *ssvWriter) writeWord(word []byte) {
+ if w.sep {
+ writeByte(w.Writer, ' ')
+ }
+ w.sep = true
+ w.Writer.Write(word)
+}
+
+func (w *ssvWriter) writeWordString(word string) {
+ if w.sep {
+ writeByte(w.Writer, ' ')
+ }
+ w.sep = true
+ io.WriteString(w.Writer, word)
+}
+
+// resetSep suppresses the separator before the next word.
+func (w *ssvWriter) resetSep() {
+ w.sep = false
+}
+
+// buffer is a minimal append-only byte buffer whose first 64 bytes live
+// inline (bootstrap) to avoid a heap allocation for small contents.
+type buffer struct {
+ buf []byte
+ bootstrap [64]byte // memory to hold first slice
+}
+
+func (b *buffer) Write(data []byte) (int, error) {
+ b.buf = append(b.buf, data...)
+ return len(data), nil
+}
+
+func (b *buffer) WriteByte(c byte) error {
+ b.buf = append(b.buf, c)
+ return nil
+}
+
+func (b *buffer) WriteString(s string) (int, error) {
+ b.buf = append(b.buf, []byte(s)...)
+ return len(s), nil
+}
+
+func (b *buffer) Bytes() []byte { return b.buf }
+func (b *buffer) Len() int { return len(b.buf) }
+func (b *buffer) String() string { return string(b.buf) }
+
+// Reset empties the buffer, keeping (or adopting) backing storage.
+func (b *buffer) Reset() {
+ if b.buf == nil {
+ b.buf = b.bootstrap[:0]
+ }
+ b.buf = b.buf[:0]
+}
+
+// evalBuffer is a pooled buffer used while evaluating expressions; it
+// combines a byte buffer with space-separated-word writing.
+type evalBuffer struct {
+ buffer
+ ssvWriter
+ args [][]byte
+}
+
+// newEbuf gets a reset evalBuffer from the pool.
+func newEbuf() *evalBuffer {
+ buf := ebufFree.Get().(*evalBuffer)
+ buf.Reset()
+ return buf
+}
+
+// release returns the buffer to the pool; oversized buffers (>1KiB cap)
+// are dropped so the pool doesn't pin large allocations.
+func (buf *evalBuffer) release() {
+ if cap(buf.Bytes()) > 1024 {
+ return
+ }
+ buf.Reset()
+ buf.args = buf.args[:0]
+ ebufFree.Put(buf)
+}
+
+func (b *evalBuffer) Reset() {
+ b.buffer.Reset()
+ b.resetSep()
+}
+
+// resetSep lazily wires the ssvWriter to the embedded buffer on first use.
+func (b *evalBuffer) resetSep() {
+ if b.ssvWriter.Writer == nil {
+ b.ssvWriter.Writer = &b.buffer
+ }
+ b.ssvWriter.resetSep()
+}
+
+// wordBuffer accumulates whitespace-separated words; words are subslices
+// of buf's storage. Consecutive Writes join at the seam when neither side
+// has whitespace there (see TestWordBuffer: "foo"+"bar" => "foobar").
+type wordBuffer struct {
+ buf buffer
+ words [][]byte
+}
+
+// newWbuf gets a reset wordBuffer from the pool.
+func newWbuf() *wordBuffer {
+ buf := wbufFree.Get().(*wordBuffer)
+ buf.Reset()
+ return buf
+}
+
+// release returns the buffer to the pool unless its backing storage has
+// grown past 1KiB.
+func (buf *wordBuffer) release() {
+ if cap(buf.Bytes()) > 1024 {
+ return
+ }
+ buf.Reset()
+ wbufFree.Put(buf)
+}
+
+func (wb *wordBuffer) Write(data []byte) (int, error) {
+ if len(data) == 0 {
+ return 0, nil
+ }
+ off := len(wb.buf.buf)
+ var cont bool
+ // cont: the previous Write ended mid-word, so the first word of |data|
+ // continues the last stored word.
+ if !isWhitespace(rune(data[0])) && len(wb.buf.buf) > 0 {
+ cont = !isWhitespace(rune(wb.buf.buf[off-1]))
+ }
+ ws := newWordScanner(data)
+ for ws.Scan() {
+ if cont {
+ // Pop the last word and re-append it merged with this one.
+ word := wb.words[len(wb.words)-1]
+ wb.words = wb.words[:len(wb.words)-1]
+ wb.buf.buf = wb.buf.buf[:len(wb.buf.buf)-len(word)]
+ var w []byte
+ w = append(w, word...)
+ w = append(w, ws.Bytes()...)
+ wb.writeWord(w)
+ cont = false
+ continue
+ }
+ wb.writeWord(ws.Bytes())
+ }
+ // Trailing whitespace marks the last word as complete for the next Write.
+ if isWhitespace(rune(data[len(data)-1])) {
+ wb.buf.buf = append(wb.buf.buf, ' ')
+ }
+ return len(data), nil
+}
+
+func (wb *wordBuffer) WriteByte(c byte) error {
+ _, err := wb.Write([]byte{c})
+ return err
+}
+
+func (wb *wordBuffer) WriteString(s string) (int, error) {
+ return wb.Write([]byte(s))
+}
+
+// writeWord appends |word| to the byte buffer (space-separated) and
+// records the stored subslice in words.
+func (wb *wordBuffer) writeWord(word []byte) {
+ if len(wb.buf.buf) > 0 {
+ wb.buf.buf = append(wb.buf.buf, ' ')
+ }
+ off := len(wb.buf.buf)
+ wb.buf.buf = append(wb.buf.buf, word...)
+ wb.words = append(wb.words, wb.buf.buf[off:off+len(word)])
+}
+
+func (wb *wordBuffer) writeWordString(word string) {
+ wb.writeWord([]byte(word))
+}
+
+func (wb *wordBuffer) Reset() {
+ wb.buf.Reset()
+ wb.words = nil
+}
+
+// resetSep is a no-op; wordBuffer manages separators itself.
+func (wb *wordBuffer) resetSep() {}
+
+func (wb *wordBuffer) Bytes() []byte {
+ return wb.buf.Bytes()
+}
diff --git a/golang/kati/buf_test.go b/golang/kati/buf_test.go
new file mode 100644
index 0000000..8ee14a0
--- /dev/null
+++ b/golang/kati/buf_test.go
@@ -0,0 +1,69 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "reflect"
+ "testing"
+)
+
+// TestWordBuffer checks word splitting and, crucially, the continuation
+// behavior across Writes: a Write ending mid-word is joined with the next
+// Write's first word ({"foo","bar"} => {"foobar"}).
+func TestWordBuffer(t *testing.T) {
+ for _, tc := range []struct {
+ in []string
+ want []string
+ }{
+ {
+ in: []string{"foo"},
+ want: []string{"foo"},
+ },
+ {
+ in: []string{"foo bar"},
+ want: []string{"foo", "bar"},
+ },
+ {
+ in: []string{" foo bar\tbaz "},
+ want: []string{"foo", "bar", "baz"},
+ },
+ {
+ in: []string{"foo", "bar"},
+ want: []string{"foobar"},
+ },
+ {
+ in: []string{"foo ", "bar"},
+ want: []string{"foo", "bar"},
+ },
+ {
+ in: []string{"foo", " bar"},
+ want: []string{"foo", "bar"},
+ },
+ {
+ in: []string{"foo ", " bar"},
+ want: []string{"foo", "bar"},
+ },
+ } {
+ var wb wordBuffer
+ for _, s := range tc.in {
+ wb.Write([]byte(s))
+ }
+
+ var got []string
+ for _, word := range wb.words {
+ got = append(got, string(word))
+ }
+ if !reflect.DeepEqual(got, tc.want) {
+ t.Errorf("%q => %q; want %q", tc.in, got, tc.want)
+ }
+ }
+}
diff --git a/golang/kati/dep.go b/golang/kati/dep.go
new file mode 100644
index 0000000..2c9870d
--- /dev/null
+++ b/golang/kati/dep.go
@@ -0,0 +1,596 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+ "path/filepath"
+ "sort"
+ "strings"
+
+ "github.com/golang/glog"
+)
+
+// DepNode represents a makefile rule for an output.
+type DepNode struct {
+ Output string
+ Cmds []string
+ Deps []*DepNode
+ OrderOnlys []*DepNode
+ Parents []*DepNode
+ HasRule bool
+ IsPhony bool
+ ActualInputs []string
+ TargetSpecificVars Vars
+ Filename string
+ Lineno int
+}
+
+// String returns a compact one-line summary for logging.
+func (n *DepNode) String() string {
+ return fmt.Sprintf("Dep{output=%s cmds=%d deps=%d orders=%d hasRule=%t phony=%t filename=%s lineno=%d}",
+ n.Output, len(n.Cmds), len(n.Deps), len(n.OrderOnlys), n.HasRule, n.IsPhony, n.Filename, n.Lineno)
+}
+
+// depBuilder turns an evalResult (parsed rules and vars) into a DepNode
+// graph. The *Cnt fields are statistics reported by reportStats.
+type depBuilder struct {
+ rules map[string]*rule
+ ruleVars map[string]Vars
+
+ implicitRules *ruleTrie
+
+ suffixRules map[string][]*rule
+ firstRule *rule
+ vars Vars
+ ev *Evaluator
+ vpaths searchPaths
+ done map[string]*DepNode
+ phony map[string]bool
+
+ trace []string
+ nodeCnt int
+ pickExplicitRuleCnt int
+ pickImplicitRuleCnt int
+ pickSuffixRuleCnt int
+ pickExplicitRuleWithoutCmdCnt int
+}
+
+// ruleTrieEntry is a rule stored at a trie node, together with the
+// unconsumed remainder of its output pattern ("" or "%<suffix>").
+type ruleTrieEntry struct {
+ rule *rule
+ suffix string
+}
+
+// ruleTrie indexes implicit rules by the literal prefix of their output
+// pattern; the part from '%' onward is kept as the entry's suffix.
+type ruleTrie struct {
+ rules []ruleTrieEntry
+ children map[byte]*ruleTrie
+}
+
+func newRuleTrie() *ruleTrie {
+ return &ruleTrie{
+ children: make(map[byte]*ruleTrie),
+ }
+}
+
+// add inserts |r| under pattern remainder |name|, descending one literal
+// byte per level until "" or a '%' wildcard is reached.
+func (rt *ruleTrie) add(name string, r *rule) {
+ glog.V(1).Infof("rule trie: add %q %v %s", name, r.outputPatterns[0], r)
+ if name == "" || name[0] == '%' {
+ glog.V(1).Infof("rule trie: add entry %q %v %s", name, r.outputPatterns[0], r)
+ rt.rules = append(rt.rules, ruleTrieEntry{
+ rule: r,
+ suffix: name,
+ })
+ return
+ }
+ c, found := rt.children[name[0]]
+ if !found {
+ c = newRuleTrie()
+ rt.children[name[0]] = c
+ }
+ c.add(name[1:], r)
+}
+
+// lookup collects rules whose pattern matches |name|, walking the trie
+// along name's bytes and testing stored suffixes at each level.
+// NOTE(review): entry.suffix[1:] assumes suffix is non-empty (starts with
+// '%') whenever name != ""; an empty-suffix entry reached with a nonempty
+// name would panic here — verify add() can never produce that state.
+func (rt *ruleTrie) lookup(name string) []*rule {
+ glog.V(1).Infof("rule trie: lookup %q", name)
+ if rt == nil {
+ return nil
+ }
+ var rules []*rule
+ for _, entry := range rt.rules {
+ if (entry.suffix == "" && name == "") || strings.HasSuffix(name, entry.suffix[1:]) {
+ rules = append(rules, entry.rule)
+ }
+ }
+ if name == "" {
+ return rules
+ }
+ rules = append(rules, rt.children[name[0]].lookup(name[1:])...)
+ glog.V(1).Infof("rule trie: lookup %q => %v", name, rules)
+ return rules
+}
+
+// size returns the total number of rules stored in this subtree.
+func (rt *ruleTrie) size() int {
+ if rt == nil {
+ return 0
+ }
+ size := len(rt.rules)
+ for _, c := range rt.children {
+ size += c.size()
+ }
+ return size
+}
+
+// replaceSuffix swaps s's extension for |newsuf| (no leading dot),
+// e.g. ("foo.c", "o") => "foo.o".
+func replaceSuffix(s string, newsuf string) string {
+ // TODO: Factor out the logic around suffix rules and use
+ // it from substitution references.
+ // http://www.gnu.org/software/make/manual/make.html#Substitution-Refs
+ return fmt.Sprintf("%s.%s", stripExt(s), newsuf)
+}
+
+// exists reports whether |target| is buildable: it has an explicit rule,
+// is declared .PHONY, or resolves to an existing file via vpaths.
+func (db *depBuilder) exists(target string) bool {
+ _, present := db.rules[target]
+ if present {
+ return true
+ }
+ if db.phony[target] {
+ return true
+ }
+ _, ok := db.vpaths.exists(target)
+ return ok
+}
+
+// canPickImplicitRule reports whether implicit rule |r| applies to
+// |output|: its first output pattern must match and every substituted
+// prerequisite must itself be buildable.
+func (db *depBuilder) canPickImplicitRule(r *rule, output string) bool {
+ outputPattern := r.outputPatterns[0]
+ if !outputPattern.match(output) {
+ return false
+ }
+ for _, input := range r.inputs {
+ input = outputPattern.subst(input, output)
+ if !db.exists(input) {
+ return false
+ }
+ }
+ return true
+}
+
+// mergeImplicitRuleVars overlays |vars| on top of any target-specific
+// vars registered for the rule's single output; |vars| wins on conflict.
+func (db *depBuilder) mergeImplicitRuleVars(outputs []string, vars Vars) Vars {
+ if len(outputs) != 1 {
+ // TODO(ukai): should return error?
+ panic(fmt.Sprintf("FIXME: Implicit rule should have only one output but %q", outputs))
+ }
+ glog.V(1).Infof("merge? %q", db.ruleVars)
+ glog.V(1).Infof("merge? %q", outputs[0])
+ ivars, present := db.ruleVars[outputs[0]]
+ if !present {
+ return vars
+ }
+ if vars == nil {
+ return ivars
+ }
+ glog.V(1).Info("merge!")
+ v := make(Vars)
+ v.Merge(ivars)
+ v.Merge(vars)
+ return v
+}
+
+// pickRule selects the rule used to build |output|, in GNU make's order:
+// an explicit rule with commands wins; otherwise an implicit (pattern)
+// rule is searched (last-added first), possibly merged with a command-less
+// explicit rule; otherwise a suffix rule; otherwise the bare explicit
+// rule, if any. Returns the rule, its target-specific vars, and whether
+// anything was found.
+func (db *depBuilder) pickRule(output string) (*rule, Vars, bool) {
+ r, present := db.rules[output]
+ vars := db.ruleVars[output]
+ if present {
+ db.pickExplicitRuleCnt++
+ if len(r.cmds) > 0 {
+ return r, vars, true
+ }
+ // If none of the explicit rules for a target has commands,
+ // then `make' searches for an applicable implicit rule to
+ // find some commands.
+ db.pickExplicitRuleWithoutCmdCnt++
+ }
+
+ irules := db.implicitRules.lookup(output)
+ for i := len(irules) - 1; i >= 0; i-- {
+ irule := irules[i]
+ if !db.canPickImplicitRule(irule, output) {
+ glog.Infof("ignore implicit rule %q %s", output, irule)
+ continue
+ }
+ glog.Infof("pick implicit rule %q => %q %s", output, irule.outputPatterns, irule)
+ db.pickImplicitRuleCnt++
+ if r != nil {
+ // Merge: keep the explicit rule but take the implicit rule's
+ // pattern, prerequisites and commands.
+ ir := &rule{}
+ *ir = *r
+ ir.outputPatterns = irule.outputPatterns
+ // implicit rule's prerequisites will be used for $<
+ ir.inputs = append(irule.inputs, ir.inputs...)
+ ir.cmds = irule.cmds
+ // TODO(ukai): filename, lineno?
+ ir.cmdLineno = irule.cmdLineno
+ return ir, vars, true
+ }
+ if vars != nil {
+ var outputs []string
+ for _, op := range irule.outputPatterns {
+ outputs = append(outputs, op.String())
+ }
+ vars = db.mergeImplicitRuleVars(outputs, vars)
+ }
+ // TODO(ukai): check len(irule.cmd) ?
+ return irule, vars, true
+ }
+
+ outputSuffix := filepath.Ext(output)
+ if !strings.HasPrefix(outputSuffix, ".") {
+ return r, vars, r != nil
+ }
+ rules, present := db.suffixRules[outputSuffix[1:]]
+ if !present {
+ return r, vars, r != nil
+ }
+ for _, irule := range rules {
+ if len(irule.inputs) != 1 {
+ // TODO(ukai): should return error?
+ panic(fmt.Sprintf("FIXME: unexpected number of input for a suffix rule (%d)", len(irule.inputs)))
+ }
+ if !db.exists(replaceSuffix(output, irule.inputs[0])) {
+ continue
+ }
+ db.pickSuffixRuleCnt++
+ if r != nil {
+ sr := &rule{}
+ *sr = *r
+ // TODO(ukai): input order is correct?
+ sr.inputs = append([]string{replaceSuffix(output, irule.inputs[0])}, r.inputs...)
+ sr.cmds = irule.cmds
+ // TODO(ukai): filename, lineno?
+ sr.cmdLineno = irule.cmdLineno
+ return sr, vars, true
+ }
+ if vars != nil {
+ vars = db.mergeImplicitRuleVars(irule.outputs, vars)
+ }
+ // TODO(ukai): check len(irule.cmd) ?
+ return irule, vars, true
+ }
+ return r, vars, r != nil
+}
+
+// expandInputs resolves the rule's prerequisites for a concrete |output|:
+// pattern rules substitute the '%' stem, suffix rules swap extensions.
+func expandInputs(rule *rule, output string) []string {
+ var inputs []string
+ for _, input := range rule.inputs {
+ if len(rule.outputPatterns) > 0 {
+ if len(rule.outputPatterns) != 1 {
+ // NOTE(review): fmt.Sprintf with no args; a plain string would do.
+ panic(fmt.Sprintf("FIXME: multiple output pattern is not supported yet"))
+ }
+ input = intern(rule.outputPatterns[0].subst(input, output))
+ } else if rule.isSuffixRule {
+ input = intern(replaceSuffix(output, input))
+ }
+ inputs = append(inputs, input)
+ }
+ return inputs
+}
+
+// buildPlan recursively builds the DepNode for |output|, memoized via
+// db.done (so shared dependencies become shared nodes). |neededBy| is the
+// requesting parent target; |tsvs| carries target-specific variables
+// inherited down the chain. Target-specific vars are applied to db.vars
+// for the duration of the subtree and restored on return.
+func (db *depBuilder) buildPlan(output string, neededBy string, tsvs Vars) (*DepNode, error) {
+ glog.V(1).Infof("Evaluating command: %s", output)
+ db.nodeCnt++
+ if db.nodeCnt%100 == 0 {
+ db.reportStats()
+ }
+
+ if n, present := db.done[output]; present {
+ return n, nil
+ }
+
+ // Register the node before recursing so dependency cycles terminate.
+ n := &DepNode{Output: output, IsPhony: db.phony[output]}
+ db.done[output] = n
+
+ // create depnode for phony targets?
+ rule, vars, present := db.pickRule(output)
+ if !present {
+ return n, nil
+ }
+
+ var restores []func()
+ if vars != nil {
+ for name, v := range vars {
+ // TODO: Consider not updating db.vars.
+ tsv := v.(*targetSpecificVar)
+ restores = append(restores, db.vars.save(name))
+ restores = append(restores, tsvs.save(name))
+ switch tsv.op {
+ case ":=", "=":
+ db.vars[name] = tsv
+ tsvs[name] = v
+ case "+=":
+ oldVar, present := db.vars[name]
+ if !present || oldVar.String() == "" {
+ db.vars[name] = tsv
+ } else {
+ var err error
+ v, err = oldVar.AppendVar(db.ev, tsv)
+ if err != nil {
+ return nil, err
+ }
+ db.vars[name] = v
+ }
+ tsvs[name] = v
+ case "?=":
+ if _, present := db.vars[name]; !present {
+ db.vars[name] = tsv
+ tsvs[name] = v
+ }
+ }
+ }
+ defer func() {
+ for _, restore := range restores {
+ restore()
+ }
+ }()
+ }
+
+ inputs := expandInputs(rule, output)
+ glog.Infof("Evaluating command: %s inputs:%q => %q", output, rule.inputs, inputs)
+ for _, input := range inputs {
+ db.trace = append(db.trace, input)
+ ni, err := db.buildPlan(input, output, tsvs)
+ db.trace = db.trace[0 : len(db.trace)-1]
+ if err != nil {
+ return nil, err
+ }
+ if ni != nil {
+ n.Deps = append(n.Deps, ni)
+ ni.Parents = append(ni.Parents, n)
+ }
+ }
+
+ for _, input := range rule.orderOnlyInputs {
+ db.trace = append(db.trace, input)
+ ni, err := db.buildPlan(input, output, tsvs)
+ db.trace = db.trace[0 : len(db.trace)-1]
+ if err != nil {
+ return nil, err
+ }
+ // Fix: guard the child node |ni| as the Deps loop above does;
+ // the previous check of |n| was always true (n is assigned
+ // unconditionally earlier) and would not prevent a nil-ni panic.
+ if ni != nil {
+ n.OrderOnlys = append(n.OrderOnlys, ni)
+ ni.Parents = append(ni.Parents, n)
+ }
+ }
+
+ n.HasRule = true
+ n.Cmds = rule.cmds
+ n.ActualInputs = inputs
+ n.TargetSpecificVars = make(Vars)
+ for k, v := range tsvs {
+ if glog.V(1) {
+ glog.Infof("output=%s tsv %s=%s", output, k, v)
+ }
+ n.TargetSpecificVars[k] = v
+ }
+ n.Filename = rule.filename
+ if len(rule.cmds) > 0 {
+ if rule.cmdLineno > 0 {
+ n.Lineno = rule.cmdLineno
+ } else {
+ n.Lineno = rule.lineno
+ }
+ }
+ return n, nil
+}
+
+// populateSuffixRule registers |r| as an old-fashioned suffix rule when
+// |output| has the ".in.out" shape (exactly two dots, leading dot).
+// Newer rules are prepended so they take priority. Returns whether a
+// suffix rule was registered.
+func (db *depBuilder) populateSuffixRule(r *rule, output string) bool {
+ if len(output) == 0 || output[0] != '.' {
+ return false
+ }
+ rest := output[1:]
+ dotIndex := strings.IndexByte(rest, '.')
+ // Not a suffix rule if there is only one dot overall, or a third dot.
+ if dotIndex < 0 || strings.IndexByte(rest[dotIndex+1:], '.') >= 0 {
+ return false
+ }
+
+ // This is a suffix rule.
+ inputSuffix := rest[:dotIndex]
+ outputSuffix := rest[dotIndex+1:]
+ sr := &rule{}
+ *sr = *r
+ sr.inputs = []string{inputSuffix}
+ sr.isSuffixRule = true
+ db.suffixRules[outputSuffix] = append([]*rule{sr}, db.suffixRules[outputSuffix]...)
+ return true
+}
+
+// mergeRules combines a later rule |r| for |output| with the earlier
+// |oldRule|: ':' vs '::' must agree; '::' rules concatenate commands,
+// ':' rules let the newer command set override (with a warning).
+func mergeRules(oldRule, r *rule, output string, isSuffixRule bool) (*rule, error) {
+ if oldRule.isDoubleColon != r.isDoubleColon {
+ return nil, r.errorf("*** target file %q has both : and :: entries.", output)
+ }
+ if len(oldRule.cmds) > 0 && len(r.cmds) > 0 && !isSuffixRule && !r.isDoubleColon {
+ warn(r.cmdpos(), "overriding commands for target %q", output)
+ warn(oldRule.cmdpos(), "ignoring old commands for target %q", output)
+ }
+
+ mr := &rule{}
+ *mr = *r
+ if r.isDoubleColon {
+ mr.cmds = append(oldRule.cmds, mr.cmds...)
+ } else if len(oldRule.cmds) > 0 && len(r.cmds) == 0 {
+ mr.cmds = oldRule.cmds
+ }
+ // If the latter rule has a command (regardless of the
+ // commands in oldRule), inputs in the latter rule has a
+ // priority.
+ if len(r.cmds) > 0 {
+ mr.inputs = append(mr.inputs, oldRule.inputs...)
+ mr.orderOnlyInputs = append(mr.orderOnlyInputs, oldRule.orderOnlyInputs...)
+ } else {
+ mr.inputs = append(oldRule.inputs, mr.inputs...)
+ mr.orderOnlyInputs = append(oldRule.orderOnlyInputs, mr.orderOnlyInputs...)
+ }
+ mr.outputPatterns = append(mr.outputPatterns, oldRule.outputPatterns...)
+ return mr, nil
+}
+
+// expandPattern expands static pattern (target: target-pattern: prereq-pattern).
+// Each listed target becomes its own rule with the pattern's '%' stem
+// substituted into the prerequisites. Rules without outputs or without
+// exactly one output pattern are returned unchanged.
+func expandPattern(r *rule) []*rule {
+ if len(r.outputs) == 0 {
+ return []*rule{r}
+ }
+ if len(r.outputPatterns) != 1 {
+ return []*rule{r}
+ }
+ var rules []*rule
+ pat := r.outputPatterns[0]
+ for _, output := range r.outputs {
+ nr := new(rule)
+ *nr = *r
+ nr.outputs = []string{output}
+ nr.outputPatterns = nil
+ nr.inputs = nil
+ for _, input := range r.inputs {
+ nr.inputs = append(nr.inputs, intern(pat.subst(input, output)))
+ }
+ rules = append(rules, nr)
+ }
+ glog.V(1).Infof("expand static pattern: outputs=%q inputs=%q -> %q", r.outputs, r.inputs, rules)
+ return rules
+}
+
+// populateExplicitRule records |r| for each of its outputs, merging with
+// any earlier rule for the same output, and remembers the first
+// non-dot-prefixed rule as the default target.
+func (db *depBuilder) populateExplicitRule(r *rule) error {
+ // It seems rules with no outputs are silently ignored.
+ if len(r.outputs) == 0 {
+ return nil
+ }
+ for _, output := range r.outputs {
+ output = trimLeadingCurdir(output)
+
+ isSuffixRule := db.populateSuffixRule(r, output)
+
+ if oldRule, present := db.rules[output]; present {
+ mr, err := mergeRules(oldRule, r, output, isSuffixRule)
+ if err != nil {
+ return err
+ }
+ db.rules[output] = mr
+ } else {
+ db.rules[output] = r
+ if db.firstRule == nil && !strings.HasPrefix(output, ".") {
+ db.firstRule = r
+ }
+ }
+ }
+ return nil
+}
+
+// populateImplicitRule indexes |r| in the implicit-rule trie, once per
+// output pattern (each copy narrowed to a single pattern).
+func (db *depBuilder) populateImplicitRule(r *rule) {
+ for _, outputPattern := range r.outputPatterns {
+ ir := &rule{}
+ *ir = *r
+ ir.outputPatterns = []pattern{outputPattern}
+ db.implicitRules.add(outputPattern.String(), ir)
+ }
+}
+
+// populateRules normalizes "./" prefixes on all prerequisites, expands
+// static pattern rules, and registers each result as an explicit rule
+// and/or (when it has no outputs) an implicit rule.
+func (db *depBuilder) populateRules(er *evalResult) error {
+ for _, r := range er.rules {
+ for i, input := range r.inputs {
+ r.inputs[i] = trimLeadingCurdir(input)
+ }
+ for i, orderOnlyInput := range r.orderOnlyInputs {
+ r.orderOnlyInputs[i] = trimLeadingCurdir(orderOnlyInput)
+ }
+ for _, r := range expandPattern(r) {
+ err := db.populateExplicitRule(r)
+ if err != nil {
+ return err
+ }
+ if len(r.outputs) == 0 {
+ db.populateImplicitRule(r)
+ }
+ }
+ }
+ return nil
+}
+
+// reportStats logs rule-picking counters (and the current dependency
+// trace) when periodic stats are enabled.
+func (db *depBuilder) reportStats() {
+ if !PeriodicStatsFlag {
+ return
+ }
+
+ logStats("node=%d explicit=%d implicit=%d suffix=%d explicitWOCmd=%d",
+ db.nodeCnt, db.pickExplicitRuleCnt, db.pickImplicitRuleCnt, db.pickSuffixRuleCnt, db.pickExplicitRuleWithoutCmdCnt)
+ if len(db.trace) > 1 {
+ logStats("trace=%q", db.trace)
+ }
+}
+
+// newDepBuilder constructs a depBuilder from an evalResult: it indexes
+// all rules and collects the .PHONY target list.
+func newDepBuilder(er *evalResult, vars Vars) (*depBuilder, error) {
+ db := &depBuilder{
+ rules: make(map[string]*rule),
+ ruleVars: er.ruleVars,
+ implicitRules: newRuleTrie(),
+ suffixRules: make(map[string][]*rule),
+ vars: vars,
+ ev: NewEvaluator(vars),
+ vpaths: er.vpaths,
+ done: make(map[string]*DepNode),
+ phony: make(map[string]bool),
+ }
+
+ err := db.populateRules(er)
+ if err != nil {
+ return nil, err
+ }
+ rule, present := db.rules[".PHONY"]
+ if present {
+ for _, input := range rule.inputs {
+ db.phony[input] = true
+ }
+ }
+ return db, nil
+}
+
+// Eval builds dependency graphs for |targets|. With no targets given it
+// uses the first rule's first output plus all phony targets (sorted for
+// determinism). Returns one root DepNode per target.
+func (db *depBuilder) Eval(targets []string) ([]*DepNode, error) {
+ if len(targets) == 0 {
+ if db.firstRule == nil {
+ return nil, fmt.Errorf("*** No targets.")
+ }
+ targets = append(targets, db.firstRule.outputs[0])
+ var phonys []string
+ for t := range db.phony {
+ phonys = append(phonys, t)
+ }
+ sort.Strings(phonys)
+ targets = append(targets, phonys...)
+ }
+
+ if StatsFlag {
+ logStats("%d variables", len(db.vars))
+ logStats("%d explicit rules", len(db.rules))
+ logStats("%d implicit rules", db.implicitRules.size())
+ logStats("%d suffix rules", len(db.suffixRules))
+ logStats("%d dirs %d files", fsCache.dirs(), fsCache.files())
+ }
+
+ var nodes []*DepNode
+ for _, target := range targets {
+ db.trace = []string{target}
+ n, err := db.buildPlan(target, "", make(Vars))
+ if err != nil {
+ return nil, err
+ }
+ nodes = append(nodes, n)
+ }
+ db.reportStats()
+ return nodes, nil
+}
diff --git a/golang/kati/depgraph.go b/golang/kati/depgraph.go
new file mode 100644
index 0000000..5b32287
--- /dev/null
+++ b/golang/kati/depgraph.go
@@ -0,0 +1,232 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "crypto/sha1"
+ "fmt"
+ "io/ioutil"
+ "strings"
+ "time"
+
+ "github.com/golang/glog"
+)
+
+// DepGraph represents rules defined in makefiles.
+type DepGraph struct {
+ nodes []*DepNode
+ vars Vars
+ accessedMks []*accessedMakefile
+ exports map[string]bool
+ vpaths searchPaths
+}
+
+// Nodes returns all rules.
+func (g *DepGraph) Nodes() []*DepNode { return g.nodes }
+
+// Vars returns all variables.
+func (g *DepGraph) Vars() Vars { return g.vars }
+
+func (g *DepGraph) resolveVPATH() {
+ seen := make(map[*DepNode]bool)
+ var fix func(n *DepNode)
+ fix = func(n *DepNode) {
+ if seen[n] {
+ return
+ }
+ seen[n] = true
+ glog.V(3).Infof("vpath check %s [%#v]", n.Output, g.vpaths)
+ if output, ok := g.vpaths.exists(n.Output); ok {
+ glog.V(2).Infof("vpath fix %s=>%s", n.Output, output)
+ n.Output = output
+ }
+ for _, d := range n.Deps {
+ fix(d)
+ }
+ for _, d := range n.OrderOnlys {
+ fix(d)
+ }
+ for _, d := range n.Parents {
+ fix(d)
+ }
+ // fix ActualInputs?
+ }
+ for _, n := range g.nodes {
+ fix(n)
+ }
+}
+
+// LoadReq is a request to load makefile.
+type LoadReq struct {
+ Makefile string
+ Targets []string
+ CommandLineVars []string
+ EnvironmentVars []string
+ UseCache bool
+ EagerEvalCommand bool
+}
+
+// FromCommandLine creates LoadReq from given command line.
+func FromCommandLine(cmdline []string) LoadReq {
+ var vars []string
+ var targets []string
+ for _, arg := range cmdline {
+ if strings.IndexByte(arg, '=') >= 0 {
+ vars = append(vars, arg)
+ continue
+ }
+ targets = append(targets, arg)
+ }
+ mk, err := defaultMakefile()
+ if err != nil {
+ glog.Warningf("default makefile: %v", err)
+ }
+ return LoadReq{
+ Makefile: mk,
+ Targets: targets,
+ CommandLineVars: vars,
+ }
+}
+
+func initVars(vars Vars, kvlist []string, origin string) error {
+ for _, v := range kvlist {
+ kv := strings.SplitN(v, "=", 2)
+ glog.V(1).Infof("%s var %q", origin, v)
+ if len(kv) < 2 {
+ return fmt.Errorf("A weird %s variable %q", origin, kv)
+ }
+ vars.Assign(kv[0], &recursiveVar{
+ expr: literal(kv[1]),
+ origin: origin,
+ })
+ }
+ return nil
+}
+
+// Load loads makefile.
+func Load(req LoadReq) (*DepGraph, error) {
+ startTime := time.Now()
+ var err error
+ if req.Makefile == "" {
+ req.Makefile, err = defaultMakefile()
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ if req.UseCache {
+ g, err := loadCache(req.Makefile, req.Targets)
+ if err == nil {
+ return g, nil
+ }
+ }
+
+ bmk, err := bootstrapMakefile(req.Targets)
+ if err != nil {
+ return nil, err
+ }
+
+ content, err := ioutil.ReadFile(req.Makefile)
+ if err != nil {
+ return nil, err
+ }
+ mk, err := parseMakefile(content, req.Makefile)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, stmt := range mk.stmts {
+ stmt.show()
+ }
+
+ mk.stmts = append(bmk.stmts, mk.stmts...)
+
+ vars := make(Vars)
+ err = initVars(vars, req.EnvironmentVars, "environment")
+ if err != nil {
+ return nil, err
+ }
+ err = initVars(vars, req.CommandLineVars, "command line")
+ if err != nil {
+ return nil, err
+ }
+ er, err := eval(mk, vars, req.UseCache)
+ if err != nil {
+ return nil, err
+ }
+ vars.Merge(er.vars)
+
+ logStats("eval time: %q", time.Since(startTime))
+ logStats("shell func time: %q %d", shellStats.Duration(), shellStats.Count())
+
+ startTime = time.Now()
+ db, err := newDepBuilder(er, vars)
+ if err != nil {
+ return nil, err
+ }
+ logStats("dep build prepare time: %q", time.Since(startTime))
+
+ startTime = time.Now()
+ nodes, err := db.Eval(req.Targets)
+ if err != nil {
+ return nil, err
+ }
+ logStats("dep build time: %q", time.Since(startTime))
+ var accessedMks []*accessedMakefile
+ // Always put the root Makefile as the first element.
+ accessedMks = append(accessedMks, &accessedMakefile{
+ Filename: req.Makefile,
+ Hash: sha1.Sum(content),
+ State: fileExists,
+ })
+ accessedMks = append(accessedMks, er.accessedMks...)
+ gd := &DepGraph{
+ nodes: nodes,
+ vars: vars,
+ accessedMks: accessedMks,
+ exports: er.exports,
+ vpaths: er.vpaths,
+ }
+ if req.EagerEvalCommand {
+ startTime := time.Now()
+ err = evalCommands(nodes, vars)
+ if err != nil {
+ return nil, err
+ }
+ logStats("eager eval command time: %q", time.Since(startTime))
+ }
+ if req.UseCache {
+ startTime := time.Now()
+ saveCache(gd, req.Targets)
+ logStats("serialize time: %q", time.Since(startTime))
+ }
+ return gd, nil
+}
+
+// Loader is the interface that loads DepGraph.
+type Loader interface {
+ Load(string) (*DepGraph, error)
+}
+
+// Saver is the interface that saves DepGraph.
+type Saver interface {
+ Save(*DepGraph, string, []string) error
+}
+
+// LoadSaver is the interface that groups Load and Save methods.
+type LoadSaver interface {
+ Loader
+ Saver
+}
diff --git a/golang/kati/doc.go b/golang/kati/doc.go
new file mode 100644
index 0000000..488675e
--- /dev/null
+++ b/golang/kati/doc.go
@@ -0,0 +1,22 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+/*
+Package kati provides GNU make compatible functions, especially
+to speed up the continuous build of Android.
+
+*/
+package kati
+
+// TODO(ukai): add more doc comments.
diff --git a/golang/kati/eval.go b/golang/kati/eval.go
new file mode 100644
index 0000000..30308c3
--- /dev/null
+++ b/golang/kati/eval.go
@@ -0,0 +1,727 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/golang/glog"
+)
+
+type fileState int
+
+const (
+ fileExists fileState = iota
+ fileNotExists
+ fileInconsistent // Modified during kati is running.
+)
+
+type accessedMakefile struct {
+ Filename string
+ Hash [sha1.Size]byte
+ State fileState
+}
+
+type accessCache struct {
+ mu sync.Mutex
+ m map[string]*accessedMakefile
+}
+
+func newAccessCache() *accessCache {
+ return &accessCache{
+ m: make(map[string]*accessedMakefile),
+ }
+}
+
+func (ac *accessCache) update(fn string, hash [sha1.Size]byte, st fileState) string {
+ if ac == nil {
+ return ""
+ }
+ ac.mu.Lock()
+ defer ac.mu.Unlock()
+ rm, present := ac.m[fn]
+ if present {
+ switch rm.State {
+ case fileExists:
+ if st != fileExists {
+ return fmt.Sprintf("%s was removed after the previous read", fn)
+ } else if !bytes.Equal(hash[:], rm.Hash[:]) {
+ ac.m[fn].State = fileInconsistent
+ return fmt.Sprintf("%s was modified after the previous read", fn)
+ }
+ return ""
+ case fileNotExists:
+ if st != fileNotExists {
+ ac.m[fn].State = fileInconsistent
+ return fmt.Sprintf("%s was created after the previous read", fn)
+ }
+ case fileInconsistent:
+ return ""
+ }
+ return ""
+ }
+ ac.m[fn] = &accessedMakefile{
+ Filename: fn,
+ Hash: hash,
+ State: st,
+ }
+ return ""
+}
+
+func (ac *accessCache) Slice() []*accessedMakefile {
+ if ac == nil {
+ return nil
+ }
+ ac.mu.Lock()
+ defer ac.mu.Unlock()
+ r := []*accessedMakefile{}
+ for _, v := range ac.m {
+ r = append(r, v)
+ }
+ return r
+}
+
+type evalResult struct {
+ vars Vars
+ rules []*rule
+ ruleVars map[string]Vars
+ accessedMks []*accessedMakefile
+ exports map[string]bool
+ vpaths searchPaths
+}
+
+type srcpos struct {
+ filename string
+ lineno int
+}
+
+func (p srcpos) String() string {
+ return fmt.Sprintf("%s:%d", p.filename, p.lineno)
+}
+
+// EvalError is an error in kati evaluation.
+type EvalError struct {
+ Filename string
+ Lineno int
+ Err error
+}
+
+func (e EvalError) Error() string {
+ return fmt.Sprintf("%s:%d: %v", e.Filename, e.Lineno, e.Err)
+}
+
+func (p srcpos) errorf(f string, args ...interface{}) error {
+ return EvalError{
+ Filename: p.filename,
+ Lineno: p.lineno,
+ Err: fmt.Errorf(f, args...),
+ }
+}
+
+func (p srcpos) error(err error) error {
+ if _, ok := err.(EvalError); ok {
+ return err
+ }
+ return EvalError{
+ Filename: p.filename,
+ Lineno: p.lineno,
+ Err: err,
+ }
+}
+
+// Evaluator manages makefile evaluation.
+type Evaluator struct {
+ paramVars []tmpval // $1 => paramVars[1]
+ outVars Vars
+ outRules []*rule
+ outRuleVars map[string]Vars
+ vars Vars
+ lastRule *rule
+ currentScope Vars
+ cache *accessCache
+ exports map[string]bool
+ vpaths []vpath
+
+ avoidIO bool
+ hasIO bool
+ // delayedOutputs are commands which should run at ninja-time
+ // (i.e., info, warning, and error).
+ delayedOutputs []string
+
+ srcpos
+}
+
+// NewEvaluator creates new Evaluator.
+func NewEvaluator(vars map[string]Var) *Evaluator {
+ return &Evaluator{
+ outVars: make(Vars),
+ vars: vars,
+ outRuleVars: make(map[string]Vars),
+ exports: make(map[string]bool),
+ }
+}
+
+func (ev *Evaluator) args(buf *evalBuffer, args ...Value) ([][]byte, error) {
+ pos := make([]int, 0, len(args))
+ for _, arg := range args {
+ buf.resetSep()
+ err := arg.Eval(buf, ev)
+ if err != nil {
+ return nil, err
+ }
+ pos = append(pos, buf.Len())
+ }
+ v := buf.Bytes()
+ buf.args = buf.args[:0]
+ s := 0
+ for _, p := range pos {
+ buf.args = append(buf.args, v[s:p])
+ s = p
+ }
+ return buf.args, nil
+}
+
+func (ev *Evaluator) evalAssign(ast *assignAST) error {
+ ev.lastRule = nil
+ lhs, rhs, err := ev.evalAssignAST(ast)
+ if err != nil {
+ return err
+ }
+ if glog.V(1) {
+ glog.Infof("ASSIGN: %s=%q (flavor:%q)", lhs, rhs, rhs.Flavor())
+ }
+ if lhs == "" {
+ return ast.errorf("*** empty variable name.")
+ }
+ ev.outVars.Assign(lhs, rhs)
+ return nil
+}
+
+func (ev *Evaluator) evalAssignAST(ast *assignAST) (string, Var, error) {
+ ev.srcpos = ast.srcpos
+
+ var lhs string
+ switch v := ast.lhs.(type) {
+ case literal:
+ lhs = string(v)
+ case tmpval:
+ lhs = string(v)
+ default:
+ buf := newEbuf()
+ err := v.Eval(buf, ev)
+ if err != nil {
+ return "", nil, err
+ }
+ lhs = string(trimSpaceBytes(buf.Bytes()))
+ buf.release()
+ }
+ rhs, err := ast.evalRHS(ev, lhs)
+ if err != nil {
+ return "", nil, err
+ }
+ return lhs, rhs, nil
+}
+
+func (ev *Evaluator) setTargetSpecificVar(assign *assignAST, output string) error {
+ vars, present := ev.outRuleVars[output]
+ if !present {
+ vars = make(Vars)
+ ev.outRuleVars[output] = vars
+ }
+ ev.currentScope = vars
+ lhs, rhs, err := ev.evalAssignAST(assign)
+ if err != nil {
+ return err
+ }
+ if glog.V(1) {
+ glog.Infof("rule outputs:%q assign:%q%s%q (flavor:%q)", output, lhs, assign.op, rhs, rhs.Flavor())
+ }
+ vars.Assign(lhs, &targetSpecificVar{v: rhs, op: assign.op})
+ ev.currentScope = nil
+ return nil
+}
+
+func (ev *Evaluator) evalMaybeRule(ast *maybeRuleAST) error {
+ ev.lastRule = nil
+ ev.srcpos = ast.srcpos
+
+ if glog.V(1) {
+ glog.Infof("maybe rule %s: %q assign:%v", ev.srcpos, ast.expr, ast.assign)
+ }
+
+ abuf := newEbuf()
+ aexpr := toExpr(ast.expr)
+ var rhs expr
+ semi := ast.semi
+ for i, v := range aexpr {
+ var hashFound bool
+ var buf evalBuffer
+ buf.resetSep()
+ switch v.(type) {
+ case literal, tmpval:
+ s := v.String()
+ i := strings.Index(s, "#")
+ if i >= 0 {
+ hashFound = true
+ v = tmpval(trimRightSpaceBytes([]byte(s[:i])))
+ }
+ }
+ err := v.Eval(&buf, ev)
+ if err != nil {
+ return err
+ }
+ b := buf.Bytes()
+ if ast.isRule {
+ abuf.Write(b)
+ continue
+ }
+ eq := findLiteralChar(b, '=', 0, skipVar)
+ if eq >= 0 {
+ abuf.Write(b[:eq+1])
+ if eq+1 < len(b) {
+ rhs = append(rhs, tmpval(trimLeftSpaceBytes(b[eq+1:])))
+ }
+ if i+1 < len(aexpr) {
+ rhs = append(rhs, aexpr[i+1:]...)
+ }
+ if ast.semi != nil {
+ rhs = append(rhs, literal(';'))
+ sexpr, _, err := parseExpr(ast.semi, nil, parseOp{})
+ if err != nil {
+ return err
+ }
+ rhs = append(rhs, toExpr(sexpr)...)
+ semi = nil
+ }
+ break
+ }
+ abuf.Write(b)
+ if hashFound {
+ break
+ }
+ }
+
+ line := abuf.Bytes()
+ r := &rule{srcpos: ast.srcpos}
+ if glog.V(1) {
+ glog.Infof("rule? %s: %q assign:%v rhs:%s", r.srcpos, line, ast.assign, rhs)
+ }
+ assign, err := r.parse(line, ast.assign, rhs)
+ if err != nil {
+ ws := newWordScanner(line)
+ if ws.Scan() {
+ if string(ws.Bytes()) == "override" {
+ warnNoPrefix(ast.srcpos, "invalid `override' directive")
+ return nil
+ }
+ }
+ return ast.error(err)
+ }
+ abuf.release()
+ if glog.V(1) {
+ glog.Infof("rule %q assign:%v rhs:%v=> outputs:%q, inputs:%q", ast.expr, ast.assign, rhs, r.outputs, r.inputs)
+ }
+
+ // TODO: Pretty print.
+ // glog.V(1).Infof("RULE: %s=%s (%d commands)", lhs, rhs, len(cmds))
+
+ if assign != nil {
+ glog.V(1).Infof("target specific var: %#v", assign)
+ for _, output := range r.outputs {
+ ev.setTargetSpecificVar(assign, output)
+ }
+ for _, output := range r.outputPatterns {
+ ev.setTargetSpecificVar(assign, output.String())
+ }
+ return nil
+ }
+
+ if semi != nil {
+ r.cmds = append(r.cmds, string(semi))
+ }
+ if glog.V(1) {
+ glog.Infof("rule outputs:%q cmds:%q", r.outputs, r.cmds)
+ }
+ ev.lastRule = r
+ ev.outRules = append(ev.outRules, r)
+ return nil
+}
+
+func (ev *Evaluator) evalCommand(ast *commandAST) error {
+ ev.srcpos = ast.srcpos
+ if ev.lastRule == nil || ev.lastRule.outputs == nil {
+ // This could still be an assignment statement. See
+ // assign_after_tab.mk.
+ if strings.IndexByte(ast.cmd, '=') >= 0 {
+ line := trimLeftSpace(ast.cmd)
+ mk, err := parseMakefileString(line, ast.srcpos)
+ if err != nil {
+ return ast.errorf("parse failed: %q: %v", line, err)
+ }
+ if len(mk.stmts) >= 1 && mk.stmts[len(mk.stmts)-1].(*assignAST) != nil {
+ for _, stmt := range mk.stmts {
+ err = ev.eval(stmt)
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }
+ // Or, a comment is OK.
+ if strings.TrimSpace(ast.cmd)[0] == '#' {
+ return nil
+ }
+ return ast.errorf("*** commands commence before first target.")
+ }
+ ev.lastRule.cmds = append(ev.lastRule.cmds, ast.cmd)
+ if ev.lastRule.cmdLineno == 0 {
+ ev.lastRule.cmdLineno = ast.lineno
+ }
+ return nil
+}
+
+func (ev *Evaluator) paramVar(name string) (Var, error) {
+ idx, err := strconv.ParseInt(name, 10, 32)
+ if err != nil {
+ return nil, fmt.Errorf("param: %s: %v", name, err)
+ }
+ i := int(idx)
+ if i < 0 || i >= len(ev.paramVars) {
+ return nil, fmt.Errorf("param: %s out of %d", name, len(ev.paramVars))
+ }
+ return &automaticVar{value: []byte(ev.paramVars[i])}, nil
+}
+
+// LookupVar looks up named variable.
+func (ev *Evaluator) LookupVar(name string) Var {
+ if ev.currentScope != nil {
+ v := ev.currentScope.Lookup(name)
+ if v.IsDefined() {
+ return v
+ }
+ }
+ v := ev.outVars.Lookup(name)
+ if v.IsDefined() {
+ return v
+ }
+ v, err := ev.paramVar(name)
+ if err == nil {
+ return v
+ }
+ return ev.vars.Lookup(name)
+}
+
+func (ev *Evaluator) lookupVarInCurrentScope(name string) Var {
+ if ev.currentScope != nil {
+ v := ev.currentScope.Lookup(name)
+ return v
+ }
+ v := ev.outVars.Lookup(name)
+ if v.IsDefined() {
+ return v
+ }
+ v, err := ev.paramVar(name)
+ if err == nil {
+ return v
+ }
+ return ev.vars.Lookup(name)
+}
+
+// EvaluateVar evaluates variable named name.
+// Only for a few special uses such as getting SHELL and handling
+// export/unexport.
+func (ev *Evaluator) EvaluateVar(name string) (string, error) {
+ var buf evalBuffer
+ buf.resetSep()
+ err := ev.LookupVar(name).Eval(&buf, ev)
+ if err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+func (ev *Evaluator) evalIncludeFile(fname string, mk makefile) error {
+ te := traceEvent.begin("include", literal(fname), traceEventMain)
+ defer func() {
+ traceEvent.end(te)
+ }()
+ var err error
+ makefileList := ev.outVars.Lookup("MAKEFILE_LIST")
+ makefileList, err = makefileList.Append(ev, mk.filename)
+ if err != nil {
+ return err
+ }
+ ev.outVars.Assign("MAKEFILE_LIST", makefileList)
+
+ for _, stmt := range mk.stmts {
+ err = ev.eval(stmt)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (ev *Evaluator) evalInclude(ast *includeAST) error {
+ ev.lastRule = nil
+ ev.srcpos = ast.srcpos
+
+ glog.Infof("%s include %q", ev.srcpos, ast.expr)
+ v, _, err := parseExpr([]byte(ast.expr), nil, parseOp{})
+ if err != nil {
+ return ast.errorf("parse failed: %q: %v", ast.expr, err)
+ }
+ var buf evalBuffer
+ buf.resetSep()
+ err = v.Eval(&buf, ev)
+ if err != nil {
+ return ast.errorf("%v", err)
+ }
+ pats := splitSpaces(buf.String())
+ buf.Reset()
+
+ var files []string
+ for _, pat := range pats {
+ if strings.Contains(pat, "*") || strings.Contains(pat, "?") {
+ matched, err := filepath.Glob(pat)
+ if err != nil {
+ return ast.errorf("glob error: %s: %v", pat, err)
+ }
+ files = append(files, matched...)
+ } else {
+ files = append(files, pat)
+ }
+ }
+
+ for _, fn := range files {
+ fn = trimLeadingCurdir(fn)
+ if IgnoreOptionalInclude != "" && ast.op == "-include" && matchPattern(fn, IgnoreOptionalInclude) {
+ continue
+ }
+ mk, hash, err := makefileCache.parse(fn)
+ if os.IsNotExist(err) {
+ if ast.op == "include" {
+ return ev.errorf("%v\nNOTE: kati does not support generating missing makefiles", err)
+ }
+ msg := ev.cache.update(fn, hash, fileNotExists)
+ if msg != "" {
+ warn(ev.srcpos, "%s", msg)
+ }
+ continue
+ }
+ msg := ev.cache.update(fn, hash, fileExists)
+ if msg != "" {
+ warn(ev.srcpos, "%s", msg)
+ }
+ err = ev.evalIncludeFile(fn, mk)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (ev *Evaluator) evalIf(iast *ifAST) error {
+ var isTrue bool
+ switch iast.op {
+ case "ifdef", "ifndef":
+ expr := iast.lhs
+ buf := newEbuf()
+ err := expr.Eval(buf, ev)
+ if err != nil {
+ return iast.errorf("%v\n expr:%s", err, expr)
+ }
+ v := ev.LookupVar(buf.String())
+ buf.Reset()
+ err = v.Eval(buf, ev)
+ if err != nil {
+ return iast.errorf("%v\n expr:%s=>%s", err, expr, v)
+ }
+ value := buf.String()
+ val := buf.Len()
+ buf.release()
+ isTrue = (val > 0) == (iast.op == "ifdef")
+ if glog.V(1) {
+ glog.Infof("%s lhs=%q value=%q => %t", iast.op, iast.lhs, value, isTrue)
+ }
+ case "ifeq", "ifneq":
+ lexpr := iast.lhs
+ rexpr := iast.rhs
+ buf := newEbuf()
+ params, err := ev.args(buf, lexpr, rexpr)
+ if err != nil {
+ return iast.errorf("%v\n (%s,%s)", err, lexpr, rexpr)
+ }
+ lhs := string(params[0])
+ rhs := string(params[1])
+ buf.release()
+ isTrue = (lhs == rhs) == (iast.op == "ifeq")
+ if glog.V(1) {
+ glog.Infof("%s lhs=%q %q rhs=%q %q => %t", iast.op, iast.lhs, lhs, iast.rhs, rhs, isTrue)
+ }
+ default:
+ return iast.errorf("unknown if statement: %q", iast.op)
+ }
+
+ var stmts []ast
+ if isTrue {
+ stmts = iast.trueStmts
+ } else {
+ stmts = iast.falseStmts
+ }
+ for _, stmt := range stmts {
+ err := ev.eval(stmt)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (ev *Evaluator) evalExport(ast *exportAST) error {
+ ev.lastRule = nil
+ ev.srcpos = ast.srcpos
+
+ v, _, err := parseExpr(ast.expr, nil, parseOp{})
+ if err != nil {
+ return ast.errorf("failed to parse: %q: %v", string(ast.expr), err)
+ }
+ var buf evalBuffer
+ buf.resetSep()
+ err = v.Eval(&buf, ev)
+ if err != nil {
+ return ast.errorf("%v\n expr:%s", err, v)
+ }
+ if ast.hasEqual {
+ ev.exports[string(trimSpaceBytes(buf.Bytes()))] = ast.export
+ } else {
+ for _, n := range splitSpacesBytes(buf.Bytes()) {
+ ev.exports[string(n)] = ast.export
+ }
+ }
+ return nil
+}
+
+func (ev *Evaluator) evalVpath(ast *vpathAST) error {
+ ev.lastRule = nil
+ ev.srcpos = ast.srcpos
+
+ var ebuf evalBuffer
+ ebuf.resetSep()
+ err := ast.expr.Eval(&ebuf, ev)
+ if err != nil {
+ return ast.errorf("%v\n expr:%s", err, ast.expr)
+ }
+ ws := newWordScanner(ebuf.Bytes())
+ if !ws.Scan() {
+ ev.vpaths = nil
+ return nil
+ }
+ pat := string(ws.Bytes())
+ if !ws.Scan() {
+ vpaths := ev.vpaths
+ ev.vpaths = nil
+ for _, v := range vpaths {
+ if v.pattern == pat {
+ continue
+ }
+ ev.vpaths = append(ev.vpaths, v)
+ }
+ return nil
+ }
+ // The search path, DIRECTORIES, is a list of directories to be
+ // searched, separated by colons (semi-colons on MS-DOS and
+ // MS-Windows) or blanks, just like the search path used in the
+ // `VPATH' variable.
+ var dirs []string
+ for {
+ for _, dir := range bytes.Split(ws.Bytes(), []byte{':'}) {
+ dirs = append(dirs, string(dir))
+ }
+ if !ws.Scan() {
+ break
+ }
+ }
+ ev.vpaths = append(ev.vpaths, vpath{
+ pattern: pat,
+ dirs: dirs,
+ })
+ return nil
+}
+
+func (ev *Evaluator) eval(stmt ast) error {
+ return stmt.eval(ev)
+}
+
+func eval(mk makefile, vars Vars, useCache bool) (er *evalResult, err error) {
+ ev := NewEvaluator(vars)
+ if useCache {
+ ev.cache = newAccessCache()
+ }
+
+ makefileList := vars.Lookup("MAKEFILE_LIST")
+ if !makefileList.IsDefined() {
+ makefileList = &simpleVar{value: []string{""}, origin: "file"}
+ }
+ makefileList, err = makefileList.Append(ev, mk.filename)
+ if err != nil {
+ return nil, err
+ }
+ ev.outVars.Assign("MAKEFILE_LIST", makefileList)
+
+ for _, stmt := range mk.stmts {
+ err = ev.eval(stmt)
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ vpaths := searchPaths{
+ vpaths: ev.vpaths,
+ }
+ v, found := ev.outVars["VPATH"]
+ if found {
+ wb := newWbuf()
+ err := v.Eval(wb, ev)
+ if err != nil {
+ return nil, err
+ }
+ // In the 'VPATH' variable, directory names are separated
+ // by colons or blanks. (on windows, semi-colons)
+ for _, word := range wb.words {
+ for _, dir := range bytes.Split(word, []byte{':'}) {
+ vpaths.dirs = append(vpaths.dirs, string(dir))
+ }
+ }
+ }
+ glog.Infof("vpaths: %#v", vpaths)
+
+ return &evalResult{
+ vars: ev.outVars,
+ rules: ev.outRules,
+ ruleVars: ev.outRuleVars,
+ accessedMks: ev.cache.Slice(),
+ exports: ev.exports,
+ vpaths: vpaths,
+ }, nil
+}
diff --git a/golang/kati/evalcmd.go b/golang/kati/evalcmd.go
new file mode 100644
index 0000000..37f94b8
--- /dev/null
+++ b/golang/kati/evalcmd.go
@@ -0,0 +1,369 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+ "os/exec"
+ "strings"
+ "sync"
+
+ "github.com/golang/glog"
+)
+
+type execContext struct {
+ shell string
+
+ mu sync.Mutex
+ ev *Evaluator
+ vpaths searchPaths
+ output string
+ inputs []string
+}
+
+func newExecContext(vars Vars, vpaths searchPaths, avoidIO bool) *execContext {
+ ev := NewEvaluator(vars)
+ ev.avoidIO = avoidIO
+
+ ctx := &execContext{
+ ev: ev,
+ vpaths: vpaths,
+ }
+ av := autoVar{ctx: ctx}
+ for k, v := range map[string]Var{
+ "@": autoAtVar{autoVar: av},
+ "<": autoLessVar{autoVar: av},
+ "^": autoHatVar{autoVar: av},
+ "+": autoPlusVar{autoVar: av},
+ "*": autoStarVar{autoVar: av},
+ } {
+ ev.vars[k] = v
+ // $<k>D = $(patsubst %/,%,$(dir $<k>))
+ ev.vars[k+"D"] = suffixDVar(k)
+ // $<k>F = $(notdir $<k>)
+ ev.vars[k+"F"] = suffixFVar(k)
+ }
+
+ // TODO: We should move this to somewhere around evalCmd so that
+ // we can handle SHELL in target specific variables.
+ shell, err := ev.EvaluateVar("SHELL")
+ if err != nil {
+ shell = "/bin/sh"
+ }
+ ctx.shell = shell
+ return ctx
+}
+
+func (ec *execContext) uniqueInputs() []string {
+ var uniqueInputs []string
+ seen := make(map[string]bool)
+ for _, input := range ec.inputs {
+ if !seen[input] {
+ seen[input] = true
+ uniqueInputs = append(uniqueInputs, input)
+ }
+ }
+ return uniqueInputs
+}
+
+type autoVar struct{ ctx *execContext }
+
+func (v autoVar) Flavor() string { return "undefined" }
+func (v autoVar) Origin() string { return "automatic" }
+func (v autoVar) IsDefined() bool { return true }
+func (v autoVar) Append(*Evaluator, string) (Var, error) {
+ return nil, fmt.Errorf("cannot append to autovar")
+}
+func (v autoVar) AppendVar(*Evaluator, Value) (Var, error) {
+ return nil, fmt.Errorf("cannot append to autovar")
+}
+func (v autoVar) serialize() serializableVar {
+ return serializableVar{Type: ""}
+}
+func (v autoVar) dump(d *dumpbuf) {
+ d.err = fmt.Errorf("cannot dump auto var: %v", v)
+}
+
+type autoAtVar struct{ autoVar }
+
+func (v autoAtVar) Eval(w evalWriter, ev *Evaluator) error {
+ fmt.Fprint(w, v.String())
+ return nil
+}
+func (v autoAtVar) String() string { return v.ctx.output }
+
+type autoLessVar struct{ autoVar }
+
+func (v autoLessVar) Eval(w evalWriter, ev *Evaluator) error {
+ fmt.Fprint(w, v.String())
+ return nil
+}
+func (v autoLessVar) String() string {
+ if len(v.ctx.inputs) > 0 {
+ return v.ctx.inputs[0]
+ }
+ return ""
+}
+
+type autoHatVar struct{ autoVar }
+
+func (v autoHatVar) Eval(w evalWriter, ev *Evaluator) error {
+ fmt.Fprint(w, v.String())
+ return nil
+}
+func (v autoHatVar) String() string {
+ return strings.Join(v.ctx.uniqueInputs(), " ")
+}
+
+type autoPlusVar struct{ autoVar }
+
+func (v autoPlusVar) Eval(w evalWriter, ev *Evaluator) error {
+ fmt.Fprint(w, v.String())
+ return nil
+}
+func (v autoPlusVar) String() string { return strings.Join(v.ctx.inputs, " ") }
+
+type autoStarVar struct{ autoVar }
+
+func (v autoStarVar) Eval(w evalWriter, ev *Evaluator) error {
+ fmt.Fprint(w, v.String())
+ return nil
+}
+
+// TODO: Use currentStem. See auto_stem_var.mk
+func (v autoStarVar) String() string { return stripExt(v.ctx.output) }
+
+func suffixDVar(k string) Var {
+ return &recursiveVar{
+ expr: expr{
+ &funcPatsubst{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(patsubst"),
+ literal("%/"),
+ literal("%"),
+ &funcDir{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(dir"),
+ &varref{
+ varname: literal(k),
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ origin: "automatic",
+ }
+}
+
+func suffixFVar(k string) Var {
+ return &recursiveVar{
+ expr: expr{
+ &funcNotdir{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(notdir"),
+ &varref{varname: literal(k)},
+ },
+ },
+ },
+ },
+ origin: "automatic",
+ }
+}
+
+// runner is a single shell command invocation.
+type runner struct {
+ output string
+ cmd string
+ echo bool
+ ignoreError bool
+ shell string
+}
+
+func (r runner) String() string {
+ cmd := r.cmd
+ if !r.echo {
+ cmd = "@" + cmd
+ }
+ if r.ignoreError {
+ cmd = "-" + cmd
+ }
+ return cmd
+}
+
+func (r runner) forCmd(s string) runner {
+ for {
+ s = trimLeftSpace(s)
+ if s == "" {
+ return runner{}
+ }
+ switch s[0] {
+ case '@':
+ if !DryRunFlag {
+ r.echo = false
+ }
+ s = s[1:]
+ continue
+ case '-':
+ r.ignoreError = true
+ s = s[1:]
+ continue
+ }
+ break
+ }
+ r.cmd = s
+ return r
+}
+
+func (r runner) eval(ev *Evaluator, s string) ([]runner, error) {
+ r = r.forCmd(s)
+ if strings.IndexByte(r.cmd, '$') < 0 {
+ // fast path
+ return []runner{r}, nil
+ }
+ // TODO(ukai): parse once more earlier?
+ expr, _, err := parseExpr([]byte(r.cmd), nil, parseOp{})
+ if err != nil {
+ return nil, ev.errorf("parse cmd %q: %v", r.cmd, err)
+ }
+ buf := newEbuf()
+ err = expr.Eval(buf, ev)
+ if err != nil {
+ return nil, err
+ }
+ cmds := buf.String()
+ buf.release()
+ glog.V(1).Infof("evalcmd: %q => %q", r.cmd, cmds)
+ var runners []runner
+ for _, cmd := range strings.Split(cmds, "\n") {
+ if len(runners) > 0 && strings.HasSuffix(runners[len(runners)-1].cmd, "\\") {
+ runners[len(runners)-1].cmd += "\n"
+ runners[len(runners)-1].cmd += cmd
+ continue
+ }
+ runners = append(runners, r.forCmd(cmd))
+ }
+ return runners, nil
+}
+
+func (r runner) run(output string) error {
+ if r.echo || DryRunFlag {
+ fmt.Printf("%s\n", r.cmd)
+ }
+ s := cmdline(r.cmd)
+ glog.Infof("sh:%q", s)
+ if DryRunFlag {
+ return nil
+ }
+ args := []string{r.shell, "-c", s}
+ cmd := exec.Cmd{
+ Path: args[0],
+ Args: args,
+ }
+ out, err := cmd.CombinedOutput()
+ fmt.Printf("%s", out)
+ exit := exitStatus(err)
+ if r.ignoreError && exit != 0 {
+ fmt.Printf("[%s] Error %d (ignored)\n", output, exit)
+ err = nil
+ }
+ return err
+}
+
+func createRunners(ctx *execContext, n *DepNode) ([]runner, bool, error) {
+ var runners []runner
+ if len(n.Cmds) == 0 {
+ return runners, false, nil
+ }
+
+ ctx.mu.Lock()
+ defer ctx.mu.Unlock()
+ // For automatic variables.
+ ctx.output = n.Output
+ ctx.inputs = n.ActualInputs
+ for k, v := range n.TargetSpecificVars {
+ restore := ctx.ev.vars.save(k)
+ defer restore()
+ ctx.ev.vars[k] = v
+ if glog.V(1) {
+ glog.Infof("set tsv: %s=%s", k, v)
+ }
+ }
+
+ ctx.ev.filename = n.Filename
+ ctx.ev.lineno = n.Lineno
+ glog.Infof("Building: %s cmds:%q", n.Output, n.Cmds)
+ r := runner{
+ output: n.Output,
+ echo: true,
+ shell: ctx.shell,
+ }
+ for _, cmd := range n.Cmds {
+ rr, err := r.eval(ctx.ev, cmd)
+ if err != nil {
+ return nil, false, err
+ }
+ for _, r := range rr {
+ if len(r.cmd) != 0 {
+ runners = append(runners, r)
+ }
+ }
+ }
+ if len(ctx.ev.delayedOutputs) > 0 {
+ var nrunners []runner
+ r := runner{
+ output: n.Output,
+ shell: ctx.shell,
+ }
+ for _, o := range ctx.ev.delayedOutputs {
+ nrunners = append(nrunners, r.forCmd(o))
+ }
+ nrunners = append(nrunners, runners...)
+ runners = nrunners
+ ctx.ev.delayedOutputs = nil
+ }
+ return runners, ctx.ev.hasIO, nil
+}
+
+func evalCommands(nodes []*DepNode, vars Vars) error {
+ ioCnt := 0
+ ectx := newExecContext(vars, searchPaths{}, true)
+ for i, n := range nodes {
+ runners, hasIO, err := createRunners(ectx, n)
+ if err != nil {
+ return err
+ }
+ if hasIO {
+ ioCnt++
+ if ioCnt%100 == 0 {
+ logStats("%d/%d rules have IO", ioCnt, i+1)
+ }
+ continue
+ }
+
+ n.Cmds = []string{}
+ n.TargetSpecificVars = make(Vars)
+ for _, r := range runners {
+ n.Cmds = append(n.Cmds, r.String())
+ }
+ }
+ logStats("%d/%d rules have IO", ioCnt, len(nodes))
+ return nil
+}
diff --git a/golang/kati/exec.go b/golang/kati/exec.go
new file mode 100644
index 0000000..fbcf41e
--- /dev/null
+++ b/golang/kati/exec.go
@@ -0,0 +1,203 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+ "os"
+ "time"
+
+ "github.com/golang/glog"
+)
+
+// Executor manages execution of makefile rules.
+type Executor struct {
+	rules         map[string]*rule
+	implicitRules []*rule
+	suffixRules   map[string][]*rule
+	firstRule     *rule
+	// target -> Job, nil means the target is currently being processed.
+	done map[string]*job
+
+	// wm schedules jobs across worker goroutines.
+	wm *workerManager
+
+	ctx *execContext
+
+	// trace is the current dependency chain, used for stats reporting.
+	trace          []string
+	buildCnt       int
+	alreadyDoneCnt int
+	noRuleCnt      int
+	upToDateCnt    int
+	runCommandCnt  int
+}
+
+// makeJobs recursively creates a job for n and all of its dependencies and
+// posts them to the worker manager.  neededBy is the job that depends on n
+// (nil for a root target).  ex.done doubles as a visited set: a nil entry
+// means the target is currently being processed, so seeing it again
+// indicates a circular dependency, which is dropped.
+func (ex *Executor) makeJobs(n *DepNode, neededBy *job) error {
+	// Resolve the output through VPATH search directories.
+	output, _ := ex.ctx.vpaths.exists(n.Output)
+	if neededBy != nil {
+		glog.V(1).Infof("MakeJob: %s for %s", output, neededBy.n.Output)
+	}
+	n.Output = output
+	ex.buildCnt++
+	if ex.buildCnt%100 == 0 {
+		ex.reportStats()
+	}
+
+	j, present := ex.done[output]
+
+	if present {
+		if j == nil {
+			// Circular dependency: n is still being processed
+			// further up this recursion.
+			if !n.IsPhony {
+				fmt.Printf("Circular %s <- %s dependency dropped.\n", neededBy.n.Output, n.Output)
+			}
+			if neededBy != nil {
+				neededBy.numDeps--
+			}
+		} else {
+			glog.Infof("%s already done: %d", j.n.Output, j.outputTs)
+			if neededBy != nil {
+				ex.wm.ReportNewDep(j, neededBy)
+			}
+		}
+		return nil
+	}
+
+	j = &job{
+		n:       n,
+		ex:      ex,
+		numDeps: len(n.Deps) + len(n.OrderOnlys),
+		depsTs:  int64(-1),
+	}
+	if neededBy != nil {
+		j.parents = append(j.parents, neededBy)
+	}
+
+	// Mark as in-progress for circular-dependency detection.
+	ex.done[output] = nil
+	// We iterate n.Deps twice. In the first run, we may modify
+	// numDeps. There will be a race if we do so after the first
+	// ex.makeJobs(d, j).
+	var deps []*DepNode
+	for _, d := range n.Deps {
+		deps = append(deps, d)
+	}
+	for _, d := range n.OrderOnlys {
+		// An order-only dependency that already exists never
+		// triggers a rebuild, so it is not a real dependency.
+		if _, ok := ex.ctx.vpaths.exists(d.Output); ok {
+			j.numDeps--
+			continue
+		}
+		deps = append(deps, d)
+	}
+	glog.V(1).Infof("new: %s (%d)", j.n.Output, j.numDeps)
+
+	for _, d := range deps {
+		ex.trace = append(ex.trace, d.Output)
+		err := ex.makeJobs(d, j)
+		ex.trace = ex.trace[0 : len(ex.trace)-1]
+		if err != nil {
+			return err
+		}
+	}
+
+	ex.done[output] = j
+	return ex.wm.PostJob(j)
+}
+
+// reportStats logs periodic build statistics; it is a no-op unless
+// PeriodicStatsFlag is set.
+func (ex *Executor) reportStats() {
+	if !PeriodicStatsFlag {
+		return
+	}
+
+	logStats("build=%d alreadyDone=%d noRule=%d, upToDate=%d runCommand=%d",
+		ex.buildCnt, ex.alreadyDoneCnt, ex.noRuleCnt, ex.upToDateCnt, ex.runCommandCnt)
+	if len(ex.trace) > 1 {
+		logStats("trace=%q", ex.trace)
+	}
+}
+
+// ExecutorOpt is an option for Executor.
+type ExecutorOpt struct {
+	// NumJobs is the number of parallel jobs (like make -j); values
+	// below 1 are clamped to 1 by NewExecutor.
+	NumJobs int
+}
+
+// NewExecutor creates new Executor.
+// A nil opt, or NumJobs < 1, defaults to a single job.
+func NewExecutor(opt *ExecutorOpt) (*Executor, error) {
+	if opt == nil {
+		opt = &ExecutorOpt{NumJobs: 1}
+	}
+	if opt.NumJobs < 1 {
+		opt.NumJobs = 1
+	}
+	wm, err := newWorkerManager(opt.NumJobs)
+	if err != nil {
+		return nil, err
+	}
+	ex := &Executor{
+		rules:       make(map[string]*rule),
+		suffixRules: make(map[string][]*rule),
+		done:        make(map[string]*job),
+		wm:          wm,
+	}
+	return ex, nil
+}
+
+// Exec executes to build targets, or first target in DepGraph.
+// It applies the graph's exported variables to the process environment,
+// schedules jobs for each requested target, and waits for completion.
+// Unknown target names are silently skipped.
+func (ex *Executor) Exec(g *DepGraph, targets []string) error {
+	ex.ctx = newExecContext(g.vars, g.vpaths, false)
+
+	// TODO: Handle target specific variables.
+	for name, export := range g.exports {
+		if export {
+			v, err := ex.ctx.ev.EvaluateVar(name)
+			if err != nil {
+				return err
+			}
+			os.Setenv(name, v)
+		} else {
+			os.Unsetenv(name)
+		}
+	}
+
+	startTime := time.Now()
+	var nodes []*DepNode
+	if len(targets) == 0 {
+		// No explicit targets: build the first rule, as make does.
+		if len(g.nodes) > 0 {
+			nodes = append(nodes, g.nodes[0])
+		}
+	} else {
+		m := make(map[string]*DepNode)
+		for _, n := range g.nodes {
+			m[n.Output] = n
+		}
+		for _, t := range targets {
+			n := m[t]
+			if n != nil {
+				nodes = append(nodes, n)
+			}
+		}
+	}
+	for _, root := range nodes {
+		err := ex.makeJobs(root, nil)
+		if err != nil {
+			break
+		}
+	}
+	// Wait returns the number of jobs that actually ran commands.
+	n, err := ex.wm.Wait()
+	logStats("exec time: %q", time.Since(startTime))
+	if n == 0 {
+		for _, root := range nodes {
+			fmt.Printf("kati: Nothing to be done for `%s'.\n", root.Output)
+		}
+	}
+	return err
+}
diff --git a/golang/kati/expr.go b/golang/kati/expr.go
new file mode 100644
index 0000000..79978b6
--- /dev/null
+++ b/golang/kati/expr.go
@@ -0,0 +1,767 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "github.com/golang/glog"
+)
+
+// Sentinel errors used by the expression parser.
+var (
+	errEndOfInput = errors.New("unexpected end of input")
+	errNotLiteral = errors.New("valueNum: not literal")
+
+	// Matches GNU make's diagnostic wording.
+	errUnterminatedVariableReference = errors.New("*** unterminated variable reference.")
+)
+
+// evalWriter is the destination for Value evaluation.  In addition to raw
+// writes it supports word-based output with separator handling.
+type evalWriter interface {
+	io.Writer
+	writeWord([]byte)
+	writeWordString(string)
+	resetSep()
+}
+
+// Value is an interface for value.
+// A Value can render itself as a string, evaluate itself into an evalWriter,
+// and serialize/dump itself for the cached-graph formats.
+type Value interface {
+	String() string
+	Eval(w evalWriter, ev *Evaluator) error
+	serialize() serializableVar
+	dump(d *dumpbuf)
+}
+
+// literal is literal value.  Evaluation just writes the string unchanged.
+type literal string
+
+func (s literal) String() string { return string(s) }
+func (s literal) Eval(w evalWriter, ev *Evaluator) error {
+	io.WriteString(w, string(s))
+	return nil
+}
+func (s literal) serialize() serializableVar {
+	return serializableVar{Type: "literal", V: string(s)}
+}
+func (s literal) dump(d *dumpbuf) {
+	d.Byte(valueTypeLiteral)
+	d.Bytes([]byte(s))
+}
+
+// tmpval is temporary value.  Like literal, but backed by a byte slice that
+// may alias the parser's input buffer, so it must not outlive it.
+type tmpval []byte
+
+func (t tmpval) String() string { return string(t) }
+func (t tmpval) Eval(w evalWriter, ev *Evaluator) error {
+	w.Write(t)
+	return nil
+}
+func (t tmpval) Value() []byte { return []byte(t) }
+func (t tmpval) serialize() serializableVar {
+	return serializableVar{Type: "tmpval", V: string(t)}
+}
+func (t tmpval) dump(d *dumpbuf) {
+	d.Byte(valueTypeTmpval)
+	d.Bytes(t)
+}
+
+// expr is a list of values.  Evaluating it evaluates each element in order
+// into the same writer.
+type expr []Value
+
+func (e expr) String() string {
+	var s []string
+	for _, v := range e {
+		s = append(s, v.String())
+	}
+	return strings.Join(s, "")
+}
+
+func (e expr) Eval(w evalWriter, ev *Evaluator) error {
+	for _, v := range e {
+		// Each element starts fresh with respect to word separators.
+		w.resetSep()
+		err := v.Eval(w, ev)
+		if err != nil {
+			return err
+		}
+	}
+	return nil
+}
+
+func (e expr) serialize() serializableVar {
+	r := serializableVar{Type: "expr"}
+	for _, v := range e {
+		r.Children = append(r.Children, v.serialize())
+	}
+	return r
+}
+func (e expr) dump(d *dumpbuf) {
+	d.Byte(valueTypeExpr)
+	d.Int(len(e))
+	for _, v := range e {
+		v.dump(d)
+	}
+}
+
+// compactExpr unwraps a single-element expr to its only Value; otherwise it
+// returns the expr as-is.
+func compactExpr(e expr) Value {
+	if len(e) == 1 {
+		return e[0]
+	}
+	// TODO(ukai): concat literal
+	return e
+}
+// toExpr converts a Value into an expr: nil stays nil, an existing expr is
+// returned unchanged, and any other Value is wrapped in a one-element expr.
+func toExpr(v Value) expr {
+	if v == nil {
+		return nil
+	}
+	if e, ok := v.(expr); ok {
+		return e
+	}
+	return expr{v}
+}
+
+// varref is variable reference. e.g. ${foo}.
+// paren is the opening parenthesis byte ('(' or '{'), or 0 for the bare
+// single-letter form $x.
+type varref struct {
+	varname Value
+	paren   byte
+}
+
+func (v *varref) String() string {
+	varname := v.varname.String()
+	if len(varname) == 1 && v.paren == 0 {
+		return fmt.Sprintf("$%s", varname)
+	}
+	paren := v.paren
+	if paren == 0 {
+		paren = '{'
+	}
+	return fmt.Sprintf("$%c%s%c", paren, varname, closeParen(paren))
+}
+
+// Eval evaluates the (possibly computed) variable name, looks the variable
+// up in ev, and evaluates its value into w.
+func (v *varref) Eval(w evalWriter, ev *Evaluator) error {
+	te := traceEvent.begin("var", v, traceEventMain)
+	buf := newEbuf()
+	err := v.varname.Eval(buf, ev)
+	if err != nil {
+		return err
+	}
+	vv := ev.LookupVar(buf.String())
+	buf.release()
+	err = vv.Eval(w, ev)
+	if err != nil {
+		return err
+	}
+	traceEvent.end(te)
+	return nil
+}
+
+func (v *varref) serialize() serializableVar {
+	return serializableVar{
+		Type:     "varref",
+		V:        string(v.paren),
+		Children: []serializableVar{v.varname.serialize()},
+	}
+}
+func (v *varref) dump(d *dumpbuf) {
+	d.Byte(valueTypeVarref)
+	d.Byte(v.paren)
+	v.varname.dump(d)
+}
+
+// paramref is parameter reference e.g. $1.
+// Parameters are the positional arguments of $(call ...).
+type paramref int
+
+func (p paramref) String() string {
+	return fmt.Sprintf("$%d", int(p))
+}
+
+func (p paramref) Eval(w evalWriter, ev *Evaluator) error {
+	te := traceEvent.begin("param", p, traceEventMain)
+	n := int(p)
+	if n < len(ev.paramVars) {
+		err := ev.paramVars[n].Eval(w, ev)
+		if err != nil {
+			return err
+		}
+	} else {
+		// Out of range of the call parameters: fall back to a
+		// variable whose name is the number itself.
+		vv := ev.LookupVar(fmt.Sprintf("%d", n))
+		err := vv.Eval(w, ev)
+		if err != nil {
+			return err
+		}
+	}
+	traceEvent.end(te)
+	return nil
+}
+
+func (p paramref) serialize() serializableVar {
+	return serializableVar{Type: "paramref", V: strconv.Itoa(int(p))}
+}
+
+func (p paramref) dump(d *dumpbuf) {
+	d.Byte(valueTypeParamref)
+	d.Int(int(p))
+}
+
+// varsubst is variable substitution. e.g. ${var:pat=subst}.
+type varsubst struct {
+	varname Value
+	pat     Value
+	subst   Value
+	paren   byte
+}
+
+func (v varsubst) String() string {
+	paren := v.paren
+	if paren == 0 {
+		paren = '{'
+	}
+	return fmt.Sprintf("$%c%s:%s=%s%c", paren, v.varname, v.pat, v.subst, closeParen(paren))
+}
+
+// Eval expands the variable's value, then applies the pat=subst suffix
+// replacement to each space-separated word, joining the results with single
+// spaces.
+func (v varsubst) Eval(w evalWriter, ev *Evaluator) error {
+	te := traceEvent.begin("varsubst", v, traceEventMain)
+	buf := newEbuf()
+	params, err := ev.args(buf, v.varname, v.pat, v.subst)
+	if err != nil {
+		return err
+	}
+	vname := string(params[0])
+	pat := string(params[1])
+	subst := string(params[2])
+	buf.Reset()
+	vv := ev.LookupVar(vname)
+	err = vv.Eval(buf, ev)
+	if err != nil {
+		return err
+	}
+	vals := splitSpaces(buf.String())
+	buf.release()
+	space := false
+	for _, val := range vals {
+		if space {
+			io.WriteString(w, " ")
+		}
+		io.WriteString(w, substRef(pat, subst, val))
+		space = true
+	}
+	traceEvent.end(te)
+	return nil
+}
+
+func (v varsubst) serialize() serializableVar {
+	return serializableVar{
+		Type: "varsubst",
+		V:    string(v.paren),
+		Children: []serializableVar{
+			v.varname.serialize(),
+			v.pat.serialize(),
+			v.subst.serialize(),
+		},
+	}
+}
+
+func (v varsubst) dump(d *dumpbuf) {
+	d.Byte(valueTypeVarsubst)
+	d.Byte(v.paren)
+	v.varname.dump(d)
+	v.pat.dump(d)
+	v.subst.dump(d)
+}
+
+// str wraps buf as a Value: a copied literal when alloc is set, or a tmpval
+// aliasing buf otherwise.
+func str(buf []byte, alloc bool) Value {
+	if alloc {
+		return literal(string(buf))
+	}
+	return tmpval(buf)
+}
+
+// appendStr appends buf to exp, merging it into the trailing literal or
+// tmpval element when possible to keep the expr compact.  Empty buf is a
+// no-op.
+func appendStr(exp expr, buf []byte, alloc bool) expr {
+	if len(buf) == 0 {
+		return exp
+	}
+	if len(exp) == 0 {
+		return append(exp, str(buf, alloc))
+	}
+	switch v := exp[len(exp)-1].(type) {
+	case literal:
+		v += literal(string(buf))
+		exp[len(exp)-1] = v
+		return exp
+	case tmpval:
+		v = append(v, buf...)
+		exp[len(exp)-1] = v
+		return exp
+	}
+	return append(exp, str(buf, alloc))
+}
+
+// valueNum parses v as a decimal integer.  It returns errNotLiteral when v
+// is not a literal or tmpval (i.e. its text is not known at parse time).
+func valueNum(v Value) (int, error) {
+	switch v := v.(type) {
+	case literal, tmpval:
+		n, err := strconv.ParseInt(v.String(), 10, 64)
+		return int(n), err
+	}
+	return 0, errNotLiteral
+}
+
+// parseOp holds options controlling parseExpr behavior.
+type parseOp struct {
+	// alloc indicates text will be allocated as literal (string)
+	alloc bool
+
+	// matchParen matches parenthesis.
+	// note: required for func arg
+	matchParen bool
+}
+
+// parseExpr parses expression in `in` until it finds any byte in term.
+// if term is nil, it will parse to end of input.
+// if term is not nil, and it reaches to end of input, return error.
+// it returns parsed value, and parsed length `n`, so in[n-1] is any byte of
+// term, and in[n:] is next input.
+//
+// NOTE: when op.matchParen is set, entries of term are temporarily zeroed
+// in place while inside a balanced paren pair, so term must be writable.
+func parseExpr(in, term []byte, op parseOp) (Value, int, error) {
+	var exp expr
+	// b is the start of the pending raw-text run; i is the scan position.
+	b := 0
+	i := 0
+	var saveParen byte
+	parenDepth := 0
+Loop:
+	for i < len(in) {
+		ch := in[i]
+		if term != nil && bytes.IndexByte(term, ch) >= 0 {
+			break Loop
+		}
+		switch ch {
+		case '$':
+			if i+1 >= len(in) {
+				break Loop
+			}
+			if in[i+1] == '$' {
+				// "$$" is a literal "$": keep the first '$'.
+				exp = appendStr(exp, in[b:i+1], op.alloc)
+				i += 2
+				b = i
+				continue
+			}
+			if bytes.IndexByte(term, in[i+1]) >= 0 {
+				// '$' immediately followed by a terminator is
+				// a reference to the empty-named variable.
+				exp = appendStr(exp, in[b:i], op.alloc)
+				exp = append(exp, &varref{varname: literal("")})
+				i++
+				b = i
+				break Loop
+			}
+			exp = appendStr(exp, in[b:i], op.alloc)
+			v, n, err := parseDollar(in[i:], op.alloc)
+			if err != nil {
+				return nil, 0, err
+			}
+			i += n
+			b = i
+			exp = append(exp, v)
+			continue
+		case '(', '{':
+			if !op.matchParen {
+				break
+			}
+			// Entering a paren pair: disable the matching close
+			// paren as a terminator until it is balanced.
+			cp := closeParen(ch)
+			if i := bytes.IndexByte(term, cp); i >= 0 {
+				parenDepth++
+				saveParen = cp
+				term[i] = 0
+			} else if cp == saveParen {
+				parenDepth++
+			}
+		case saveParen:
+			if !op.matchParen {
+				break
+			}
+			parenDepth--
+			if parenDepth == 0 {
+				// Balanced again: restore the terminator.
+				i := bytes.IndexByte(term, 0)
+				term[i] = saveParen
+				saveParen = 0
+			}
+		}
+		i++
+	}
+	exp = appendStr(exp, in[b:i], op.alloc)
+	if i == len(in) && term != nil {
+		glog.Warningf("parse: unexpected end of input: %q %d [%q]", in, i, term)
+		return exp, i, errEndOfInput
+	}
+	return compactExpr(exp), i, nil
+}
+
+// closeParen returns the closing byte matching the opening paren ch, or 0
+// when ch is not an opening paren.
+func closeParen(ch byte) byte {
+	switch ch {
+	case '(':
+		return ')'
+	case '{':
+		return '}'
+	}
+	return 0
+}
+
+// parseDollar parses
+// $(func expr[, expr...]) # func = literal SP
+// $(expr:expr=expr)
+// $(expr)
+// $x
+// it returns parsed value and parsed length.
+func parseDollar(in []byte, alloc bool) (Value, int, error) {
+	if len(in) <= 1 {
+		return nil, 0, errors.New("empty expr")
+	}
+	if in[0] != '$' {
+		return nil, 0, errors.New("should starts with $")
+	}
+	if in[1] == '$' {
+		return nil, 0, errors.New("should handle $$ as literal $")
+	}
+	oparen := in[1]
+	paren := closeParen(oparen)
+	if paren == 0 {
+		// $x case.
+		if in[1] >= '0' && in[1] <= '9' {
+			// Single-digit positional parameter, e.g. $1.
+			return paramref(in[1] - '0'), 2, nil
+		}
+		return &varref{varname: str(in[1:2], alloc)}, 2, nil
+	}
+	// The next interesting byte decides the form: close paren (plain
+	// varref), ':' (substitution), or ' ' (function call).
+	term := []byte{paren, ':', ' '}
+	var varname expr
+	i := 2
+	op := parseOp{alloc: alloc}
+Again:
+	for {
+		e, n, err := parseExpr(in[i:], term, op)
+		if err != nil {
+			if err == errEndOfInput {
+				// unmatched_paren2.mk
+				// Recover GNU-make style: re-interpret an inner
+				// varref with the same paren as the unterminated
+				// reference's literal "$(" prefix.
+				varname = append(varname, toExpr(e)...)
+				if len(varname) > 0 {
+					for i, vn := range varname {
+						if vr, ok := vn.(*varref); ok {
+							if vr.paren == oparen {
+								varname = varname[:i+1]
+								varname[i] = expr{literal(fmt.Sprintf("$%c", oparen)), vr.varname}
+								return &varref{varname: varname, paren: oparen}, i + 1 + n + 1, nil
+							}
+						}
+					}
+				}
+				return nil, 0, errUnterminatedVariableReference
+			}
+			return nil, 0, err
+		}
+		varname = append(varname, toExpr(e)...)
+		i += n
+		switch in[i] {
+		case paren:
+			// ${expr}
+			vname := compactExpr(varname)
+			n, err := valueNum(vname)
+			if err == nil {
+				// ${n}
+				return paramref(n), i + 1, nil
+			}
+			return &varref{varname: vname, paren: oparen}, i + 1, nil
+		case ' ':
+			// ${e ...}
+			// A known function name followed by a space is a
+			// function call; otherwise the space is part of the
+			// variable name.
+			switch token := e.(type) {
+			case literal, tmpval:
+				funcName := intern(token.String())
+				if f, ok := funcMap[funcName]; ok {
+					return parseFunc(f(), in, i+1, term[:1], funcName, op.alloc)
+				}
+			}
+			term = term[:2] // drop ' '
+			continue Again
+		case ':':
+			// ${varname:...}
+			colon := in[i : i+1]
+			var vterm []byte
+			vterm = append(vterm, term[:2]...)
+			vterm[1] = '=' // term={paren, '='}.
+			e, n, err := parseExpr(in[i+1:], vterm, op)
+			if err != nil {
+				return nil, 0, err
+			}
+			i += 1 + n
+			if in[i] == paren {
+				// No '=': the ':' belongs to the variable name.
+				varname = appendStr(varname, colon, op.alloc)
+				return &varref{varname: varname, paren: oparen}, i + 1, nil
+			}
+			// ${varname:xx=...}
+			pat := e
+			subst, n, err := parseExpr(in[i+1:], term[:1], op)
+			if err != nil {
+				return nil, 0, err
+			}
+			i += 1 + n
+			// ${first:pat=e}
+			return varsubst{
+				varname: compactExpr(varname),
+				pat:     pat,
+				subst:   subst,
+				paren:   oparen,
+			}, i + 1, nil
+		default:
+			return nil, 0, fmt.Errorf("unexpected char %c at %d in %q", in[i], i, string(in))
+		}
+	}
+}
+
+// skipSpaces skips spaces at front of `in` before any bytes in term.
+// in[n] will be the first non white space in in.
+// A terminator byte stops the scan even if it is itself whitespace.
+func skipSpaces(in, term []byte) int {
+	for i := 0; i < len(in); i++ {
+		if bytes.IndexByte(term, in[i]) >= 0 {
+			return i
+		}
+		switch in[i] {
+		case ' ', '\t':
+		default:
+			return i
+		}
+	}
+	return len(in)
+}
+
+// trimLiteralSpace trims literal space around v.
+// For an expr, only the leading/trailing literal or tmpval elements are
+// trimmed; inner elements and non-literal ends are left untouched.
+func trimLiteralSpace(v Value) Value {
+	switch v := v.(type) {
+	case literal:
+		return literal(strings.TrimSpace(string(v)))
+	case tmpval:
+		b := bytes.TrimSpace([]byte(v))
+		if len(b) == 0 {
+			return literal("")
+		}
+		return tmpval(b)
+	case expr:
+		if len(v) == 0 {
+			return v
+		}
+		switch s := v[0].(type) {
+		case literal, tmpval:
+			t := trimLiteralSpace(s)
+			if t == literal("") {
+				v = v[1:]
+			} else {
+				v[0] = t
+			}
+		}
+		switch s := v[len(v)-1].(type) {
+		case literal, tmpval:
+			t := trimLiteralSpace(s)
+			if t == literal("") {
+				v = v[:len(v)-1]
+			} else {
+				v[len(v)-1] = t
+			}
+		}
+		return compactExpr(v)
+	}
+	return v
+}
+
+// concatLine concatenates lines joined with "\\\n" in function expression,
+// dropping the backslash-newline and any leading whitespace of the
+// continuation line.
+// TODO(ukai): less alloc?
+func concatLine(v Value) Value {
+	switch v := v.(type) {
+	case literal:
+		for {
+			s := string(v)
+			i := strings.Index(s, "\\\n")
+			if i < 0 {
+				return v
+			}
+			v = literal(s[:i] + strings.TrimLeft(s[i+2:], " \t"))
+		}
+	case tmpval:
+		for {
+			b := []byte(v)
+			i := bytes.Index(b, []byte{'\\', '\n'})
+			if i < 0 {
+				return v
+			}
+			var buf bytes.Buffer
+			buf.Write(b[:i])
+			buf.Write(bytes.TrimLeft(b[i+2:], " \t"))
+			v = tmpval(buf.Bytes())
+		}
+	case expr:
+		// Only literal elements can contain raw text to join.
+		for i := range v {
+			switch vv := v[i].(type) {
+			case literal, tmpval:
+				v[i] = concatLine(vv)
+			}
+		}
+		return v
+	}
+	return v
+}
+
+// parseFunc parses function arguments from in[s:] for f.
+// in[0] is '$' and in[s] is space just after func name.
+// in[:n] will be "${func args...}"
+// Once f's arity is reached, ',' stops separating and the rest is one final
+// argument.  The result is compacted (constant-folded) when f supports it,
+// and wrapped in funcstats when tracing/stats are enabled.
+func parseFunc(f mkFunc, in []byte, s int, term []byte, funcName string, alloc bool) (Value, int, error) {
+	// First arg is "(funcname" / "{funcname", as mkFunc.AddArg expects.
+	f.AddArg(str(in[1:s-1], alloc))
+	arity := f.Arity()
+	term = append(term, ',')
+	i := skipSpaces(in[s:], term)
+	i = s + i
+	if i == len(in) {
+		return f, i, nil
+	}
+	narg := 1
+	op := parseOp{alloc: alloc, matchParen: true}
+	for {
+		if arity != 0 && narg >= arity {
+			// final arguments.
+			term = term[:1] // drop ','
+		}
+		v, n, err := parseExpr(in[i:], term, op)
+		if err != nil {
+			if err == errEndOfInput {
+				return nil, 0, fmt.Errorf("*** unterminated call to function `%s': missing `)'.", funcName)
+			}
+			return nil, 0, err
+		}
+		v = concatLine(v)
+		// TODO(ukai): do this in funcIf, funcAnd, or funcOr's compactor?
+		if (narg == 1 && funcName == "if") || funcName == "and" || funcName == "or" {
+			v = trimLiteralSpace(v)
+		}
+		f.AddArg(v)
+		i += n
+		narg++
+		if in[i] == term[0] {
+			// Reached the closing paren.
+			i++
+			break
+		}
+		i++ // should be ','
+		if i == len(in) {
+			break
+		}
+	}
+	var fv Value
+	fv = f
+	if compactor, ok := f.(compactor); ok {
+		fv = compactor.Compact()
+	}
+	if EvalStatsFlag || traceEvent.enabled() {
+		fv = funcstats{
+			Value: fv,
+			str:   fv.String(),
+		}
+
+	}
+	return fv, i, nil
+}
+
+// compactor is implemented by functions that can fold themselves into a
+// simpler Value at parse time.
+type compactor interface {
+	Compact() Value
+}
+
+// funcstats wraps a Value to record a trace event on each evaluation.
+// str caches the String() form so tracing does not re-render it.
+type funcstats struct {
+	Value
+	str string
+}
+
+func (f funcstats) Eval(w evalWriter, ev *Evaluator) error {
+	te := traceEvent.begin("func", literal(f.str), traceEventMain)
+	err := f.Value.Eval(w, ev)
+	if err != nil {
+		return err
+	}
+	// TODO(ukai): per functype?
+	traceEvent.end(te)
+	return nil
+}
+
+// matcherValue is a Value stub embedded by pattern-matcher values; it can
+// neither be evaluated nor dumped.
+type matcherValue struct{}
+
+func (m matcherValue) Eval(w evalWriter, ev *Evaluator) error {
+	return fmt.Errorf("couldn't eval matcher")
+}
+func (m matcherValue) serialize() serializableVar {
+	return serializableVar{Type: ""}
+}
+
+func (m matcherValue) dump(d *dumpbuf) {
+	d.err = fmt.Errorf("couldn't dump matcher")
+}
+
+// matchVarref is a pattern element matching any varref or paramref.
+type matchVarref struct{ matcherValue }
+
+func (m matchVarref) String() string { return "$(match-any)" }
+
+// literalRE is a pattern element matching a literal against a regexp;
+// capture groups become match results in matchExpr.
+type literalRE struct {
+	matcherValue
+	*regexp.Regexp
+}
+
+// mustLiteralRE compiles s into a literalRE, panicking on invalid patterns
+// (for use with package-level constant patterns only).
+func mustLiteralRE(s string) literalRE {
+	return literalRE{
+		Regexp: regexp.MustCompile(s),
+	}
+}
+
+func (r literalRE) String() string { return r.Regexp.String() }
+
+// matchValue reports whether exp matches the pattern value pat.
+// Only literal patterns are supported; they compare by string equality.
+func matchValue(exp, pat Value) bool {
+	switch pat := pat.(type) {
+	case literal:
+		return literal(exp.String()) == pat
+	}
+	// TODO: other type match?
+	return false
+}
+
+// matchExpr matches exp element-wise against pattern pat and returns the
+// captured values: a matchVarref captures the whole varref/paramref element,
+// a literalRE captures its regexp submatches, and any other pattern element
+// must compare equal via matchValue.  ok is false on any mismatch.
+func matchExpr(exp, pat expr) ([]Value, bool) {
+	if len(exp) != len(pat) {
+		return nil, false
+	}
+	var mv matchVarref
+	var matches []Value
+	for i := range exp {
+		if pat[i] == mv {
+			switch exp[i].(type) {
+			case paramref, *varref:
+				matches = append(matches, exp[i])
+				continue
+			}
+			return nil, false
+		}
+		if patre, ok := pat[i].(literalRE); ok {
+			re := patre.Regexp
+			m := re.FindStringSubmatch(exp[i].String())
+			if m == nil {
+				return nil, false
+			}
+			// m[0] is the whole match; only submatches are captured.
+			for _, sm := range m[1:] {
+				matches = append(matches, literal(sm))
+			}
+			continue
+		}
+		if !matchValue(exp[i], pat[i]) {
+			return nil, false
+		}
+	}
+	return matches, true
+}
diff --git a/golang/kati/expr_test.go b/golang/kati/expr_test.go
new file mode 100644
index 0000000..ad51a89
--- /dev/null
+++ b/golang/kati/expr_test.go
@@ -0,0 +1,308 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "reflect"
+ "testing"
+)
+
+func TestParseExpr(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ val Value
+ isErr bool
+ }{
+ {
+ in: "foo",
+ val: literal("foo"),
+ },
+ {
+ in: "(foo)",
+ val: literal("(foo)"),
+ },
+ {
+ in: "{foo}",
+ val: literal("{foo}"),
+ },
+ {
+ in: "$$",
+ val: literal("$"),
+ },
+ {
+ in: "foo$$bar",
+ val: literal("foo$bar"),
+ },
+ {
+ in: "$foo",
+ val: expr{&varref{varname: literal("f")}, literal("oo")},
+ },
+ {
+ in: "$(foo)",
+ val: &varref{varname: literal("foo"), paren: '('},
+ },
+ {
+ in: "$(foo:.c=.o)",
+ val: varsubst{
+ varname: literal("foo"),
+ pat: literal(".c"),
+ subst: literal(".o"),
+ paren: '(',
+ },
+ },
+ {
+ in: "$(subst $(space),$(,),$(foo))/bar",
+ val: expr{
+ &funcSubst{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(subst"),
+ &varref{
+ varname: literal("space"),
+ paren: '(',
+ },
+ &varref{
+ varname: literal(","),
+ paren: '(',
+ },
+ &varref{
+ varname: literal("foo"),
+ paren: '(',
+ },
+ },
+ },
+ },
+ literal("/bar"),
+ },
+ },
+ {
+ in: "$(subst $(space),$,,$(foo))",
+ val: &funcSubst{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(subst"),
+ &varref{
+ varname: literal("space"),
+ paren: '(',
+ },
+ &varref{
+ varname: literal(""),
+ },
+ expr{
+ literal(","),
+ &varref{
+ varname: literal("foo"),
+ paren: '(',
+ },
+ },
+ },
+ },
+ },
+ },
+ {
+ in: `$(shell echo '()')`,
+ val: &funcShell{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(shell"),
+ literal("echo '()'"),
+ },
+ },
+ },
+ },
+ {
+ in: `${shell echo '()'}`,
+ val: &funcShell{
+ fclosure: fclosure{
+ args: []Value{
+ literal("{shell"),
+ literal("echo '()'"),
+ },
+ },
+ },
+ },
+ {
+ in: `$(shell echo ')')`,
+ val: expr{
+ &funcShell{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(shell"),
+ literal("echo '"),
+ },
+ },
+ },
+ literal("')"),
+ },
+ },
+ {
+ in: `${shell echo ')'}`,
+ val: &funcShell{
+ fclosure: fclosure{
+ args: []Value{
+ literal("{shell"),
+ literal("echo ')'"),
+ },
+ },
+ },
+ },
+ {
+ in: `${shell echo '}'}`,
+ val: expr{
+ &funcShell{
+ fclosure: fclosure{
+ args: []Value{
+ literal("{shell"),
+ literal("echo '"),
+ },
+ },
+ },
+ literal("'}"),
+ },
+ },
+ {
+ in: `$(shell make --version | ruby -n0e 'puts $$_[/Make (\d)/,1]')`,
+ val: &funcShell{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(shell"),
+ literal(`make --version | ruby -n0e 'puts $_[/Make (\d)/,1]'`),
+ },
+ },
+ },
+ },
+ {
+ in: `$(and ${TRUE}, $(X) )`,
+ val: &funcAnd{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(and"),
+ &varref{
+ varname: literal("TRUE"),
+ paren: '{',
+ },
+ &varref{
+ varname: literal("X"),
+ paren: '(',
+ },
+ },
+ },
+ },
+ },
+ {
+ in: `$(call func, \
+ foo)`,
+ val: &funcCall{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(call"),
+ literal("func"),
+ literal(" foo"),
+ },
+ },
+ },
+ },
+ {
+ in: `$(call func, \)`,
+ val: &funcCall{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(call"),
+ literal("func"),
+ literal(` \`),
+ },
+ },
+ },
+ },
+ {
+ in: `$(eval ## comment)`,
+ val: &funcNop{
+ expr: `$(eval ## comment)`,
+ },
+ },
+ {
+ in: `$(eval foo = bar)`,
+ val: &funcEvalAssign{
+ lhs: "foo",
+ op: "=",
+ rhs: literal("bar"),
+ },
+ },
+ {
+ in: `$(eval foo :=)`,
+ val: &funcEvalAssign{
+ lhs: "foo",
+ op: ":=",
+ rhs: literal(""),
+ },
+ },
+ {
+ in: `$(eval foo := $(bar))`,
+ val: &funcEvalAssign{
+ lhs: "foo",
+ op: ":=",
+ rhs: &varref{
+ varname: literal("bar"),
+ paren: '(',
+ },
+ },
+ },
+ {
+ in: `$(eval foo := $$(bar))`,
+ val: &funcEvalAssign{
+ lhs: "foo",
+ op: ":=",
+ rhs: literal("$(bar)"),
+ },
+ },
+ {
+ in: `$(strip $1)`,
+ val: &funcStrip{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(strip"),
+ paramref(1),
+ },
+ },
+ },
+ },
+ {
+ in: `$(strip $(1))`,
+ val: &funcStrip{
+ fclosure: fclosure{
+ args: []Value{
+ literal("(strip"),
+ paramref(1),
+ },
+ },
+ },
+ },
+ } {
+ val, _, err := parseExpr([]byte(tc.in), nil, parseOp{alloc: true})
+ if tc.isErr {
+ if err == nil {
+ t.Errorf(`parseExpr(%q)=_, _, nil; want error`, tc.in)
+ }
+ continue
+ }
+ if err != nil {
+ t.Errorf(`parseExpr(%q)=_, _, %v; want nil error`, tc.in, err)
+ continue
+ }
+ if got, want := val, tc.val; !reflect.DeepEqual(got, want) {
+ t.Errorf("parseExpr(%[1]q)=%[2]q %#[2]v, _, _;\n want %[3]q %#[3]v, _, _", tc.in, got, want)
+ }
+ }
+}
diff --git a/golang/kati/fileutil.go b/golang/kati/fileutil.go
new file mode 100644
index 0000000..feef812
--- /dev/null
+++ b/golang/kati/fileutil.go
@@ -0,0 +1,62 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "os"
+ "path/filepath"
+)
+
+// exists reports whether filename exists.
+// NOTE(review): any stat error other than "not exist" (e.g. permission
+// denied) is treated as existing — presumably intentional best-effort
+// behavior; confirm before changing.
+func exists(filename string) bool {
+	_, err := os.Stat(filename)
+	if os.IsNotExist(err) {
+		return false
+	}
+	return true
+}
+
+// vpath is a single `vpath pattern dirs...` directive: dirs are searched
+// for targets matching pattern.
+type vpath struct {
+	pattern string
+	dirs    []string
+}
+
+// searchPaths holds make's VPATH search configuration.
+type searchPaths struct {
+	vpaths []vpath  // vpath directives
+	dirs   []string // VPATH variable
+}
+
+// exists resolves target through the search paths: the target itself first,
+// then matching vpath directive directories, then VPATH directories.  It
+// returns the resolved path and true on a hit, or the original target and
+// false when nothing exists.
+func (s searchPaths) exists(target string) (string, bool) {
+	if exists(target) {
+		return target, true
+	}
+	for _, vpath := range s.vpaths {
+		if !matchPattern(vpath.pattern, target) {
+			continue
+		}
+		for _, dir := range vpath.dirs {
+			vtarget := filepath.Join(dir, target)
+			if exists(vtarget) {
+				return vtarget, true
+			}
+		}
+	}
+	for _, dir := range s.dirs {
+		vtarget := filepath.Join(dir, target)
+		if exists(vtarget) {
+			return vtarget, true
+		}
+	}
+	return target, false
+}
diff --git a/golang/kati/flags.go b/golang/kati/flags.go
new file mode 100644
index 0000000..f2353a2
--- /dev/null
+++ b/golang/kati/flags.go
@@ -0,0 +1,29 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+// Flags to control kati.
+var (
+	// StatsFlag logs statistics at the end of a run.
+	StatsFlag bool
+	// PeriodicStatsFlag logs statistics periodically during a run.
+	PeriodicStatsFlag bool
+	// EvalStatsFlag collects per-expression evaluation statistics.
+	EvalStatsFlag bool
+
+	// DryRunFlag prints commands instead of executing them.
+	DryRunFlag bool
+
+	// UseFindEmulator emulates find commands in-process.
+	UseFindEmulator bool
+	// UseShellBuiltins rewrites some shell invocations to builtins.
+	UseShellBuiltins bool
+
+	// IgnoreOptionalInclude skips -include files matching this pattern.
+	IgnoreOptionalInclude string
+)
diff --git a/golang/kati/func.go b/golang/kati/func.go
new file mode 100644
index 0000000..bdc560d
--- /dev/null
+++ b/golang/kati/func.go
@@ -0,0 +1,1464 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/golang/glog"
+)
+
// mkFunc is a make function.
// http://www.gnu.org/software/make/manual/make.html#Functions

// mkFunc is a make builtin function. Implementations embed fclosure for
// argument storage and provide Arity plus the Value interface (Eval etc.).
type mkFunc interface {
	// Arity is the function's maximum arity.
	// ',' will not be handled as an argument separator beyond arity.
	// 0 means varargs.
	Arity() int

	// AddArg adds value as an argument.
	// The first argument will be "(funcname", or "{funcname".
	AddArg(Value)

	Value
}
+
var (
	// funcMap maps each make builtin function name to a constructor for
	// its implementation. The parser looks names up here when it sees
	// $(name ...).
	funcMap = map[string]func() mkFunc{
		"patsubst":   func() mkFunc { return &funcPatsubst{} },
		"strip":      func() mkFunc { return &funcStrip{} },
		"subst":      func() mkFunc { return &funcSubst{} },
		"findstring": func() mkFunc { return &funcFindstring{} },
		"filter":     func() mkFunc { return &funcFilter{} },
		"filter-out": func() mkFunc { return &funcFilterOut{} },
		"sort":       func() mkFunc { return &funcSort{} },
		"word":       func() mkFunc { return &funcWord{} },
		"wordlist":   func() mkFunc { return &funcWordlist{} },
		"words":      func() mkFunc { return &funcWords{} },
		"firstword":  func() mkFunc { return &funcFirstword{} },
		"lastword":   func() mkFunc { return &funcLastword{} },

		"join":      func() mkFunc { return &funcJoin{} },
		"wildcard":  func() mkFunc { return &funcWildcard{} },
		"dir":       func() mkFunc { return &funcDir{} },
		"notdir":    func() mkFunc { return &funcNotdir{} },
		"suffix":    func() mkFunc { return &funcSuffix{} },
		"basename":  func() mkFunc { return &funcBasename{} },
		"addsuffix": func() mkFunc { return &funcAddsuffix{} },
		"addprefix": func() mkFunc { return &funcAddprefix{} },
		"realpath":  func() mkFunc { return &funcRealpath{} },
		"abspath":   func() mkFunc { return &funcAbspath{} },

		"if":  func() mkFunc { return &funcIf{} },
		"and": func() mkFunc { return &funcAnd{} },
		"or":  func() mkFunc { return &funcOr{} },

		"value": func() mkFunc { return &funcValue{} },

		"eval": func() mkFunc { return &funcEval{} },

		"shell":   func() mkFunc { return &funcShell{} },
		"call":    func() mkFunc { return &funcCall{} },
		"foreach": func() mkFunc { return &funcForeach{} },

		"origin":  func() mkFunc { return &funcOrigin{} },
		"flavor":  func() mkFunc { return &funcFlavor{} },
		"info":    func() mkFunc { return &funcInfo{} },
		"warning": func() mkFunc { return &funcWarning{} },
		"error":   func() mkFunc { return &funcError{} },
	}
)
+
// arityError reports that a make function was invoked with fewer
// arguments than it requires.
type arityError struct {
	narg int
	name string
}

// Error formats the message in GNU make's style.
func (e arityError) Error() string {
	return fmt.Sprintf("*** insufficient number of arguments (%d) to function `%s'.", e.narg, e.name)
}

// assertArity returns an arityError when the function `name` received
// fewer than req arguments. n counts args[0] (the "(funcname" token),
// so the real argument count is n-1.
func assertArity(name string, req, n int) error {
	narg := n - 1
	if narg >= req {
		return nil
	}
	return arityError{narg: narg, name: name}
}
+
// numericValueForFunc parses v as a non-negative decimal integer.
// ok is false when v is not a number or is negative; n still carries
// whatever strconv.Atoi produced, matching the original behavior.
func numericValueForFunc(v string) (int, bool) {
	n, err := strconv.Atoi(v)
	ok := err == nil && n >= 0
	return n, ok
}
+
// formatCommandOutput post-processes $(shell) output the way GNU make
// does: trailing newlines are dropped and interior newlines become spaces.
func formatCommandOutput(out []byte) []byte {
	trimmed := bytes.TrimRight(out, "\n")
	return bytes.Replace(trimmed, []byte("\n"), []byte(" "), -1)
}
+
// fclosure is the common state embedded by every mkFunc implementation:
// the (not yet evaluated) argument values.
type fclosure struct {
	// args[0] is "(funcname", or "{funcname".
	args []Value
}

// AddArg appends one argument value (part of the mkFunc interface).
func (c *fclosure) AddArg(v Value) {
	c.args = append(c.args, v)
}

// String reconstructs the $(funcname arg,arg,...) source form, using the
// open paren/brace recorded in args[0] to pick the matching closer.
func (c *fclosure) String() string {
	if len(c.args) == 0 {
		return "$(func)"
	}
	arg0 := c.args[0].String()
	if arg0 == "" {
		return "$(func )"
	}
	cp := closeParen(arg0[0])
	if cp == 0 {
		// args[0] did not start with '(' or '{'; fall back to braces.
		return "${func }"
	}
	var args []string
	for _, arg := range c.args[1:] {
		args = append(args, arg.String())
	}
	return fmt.Sprintf("$%s %s%c", arg0, strings.Join(args, ","), cp)
}

// serialize encodes the function call as a "func" node whose children
// are the serialized arguments.
func (c *fclosure) serialize() serializableVar {
	r := serializableVar{Type: "func"}
	for _, a := range c.args {
		r.Children = append(r.Children, a.serialize())
	}
	return r
}

// dump writes the binary form: a func tag followed by each argument.
func (c *fclosure) dump(d *dumpbuf) {
	d.Byte(valueTypeFunc)
	for _, a := range c.args {
		a.dump(d)
	}
}
+
// http://www.gnu.org/software/make/manual/make.html#Text-Functions
// funcSubst implements $(subst from,to,text): literal text replacement.
type funcSubst struct{ fclosure }

func (f *funcSubst) Arity() int { return 3 }
func (f *funcSubst) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("subst", 3, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	fargs, err := ev.args(abuf, f.args[1:]...)
	if err != nil {
		return err
	}
	t := time.Now()
	from := fargs[0]
	to := fargs[1]
	text := fargs[2]
	glog.V(1).Infof("subst from:%q to:%q text:%q", from, to, text)
	if len(from) == 0 {
		// NOTE(review): with an empty `from`, `to` is appended after
		// `text` — presumably mirroring GNU make's behavior for
		// $(subst ,to,text); confirm against make before changing.
		w.Write(text)
		w.Write(to)
	} else {
		w.Write(bytes.Replace(text, from, to, -1))
	}
	abuf.release()
	stats.add("funcbody", "subst", t)
	return nil
}
+
// funcPatsubst implements $(patsubst pattern,replacement,text):
// per-word %-pattern substitution.
type funcPatsubst struct{ fclosure }

func (f *funcPatsubst) Arity() int { return 3 }
func (f *funcPatsubst) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("patsubst", 3, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	fargs, err := ev.args(abuf, f.args[1], f.args[2])
	if err != nil {
		return err
	}
	// The text argument is word-split; substitution applies per word.
	wb := newWbuf()
	err = f.args[3].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	pat := fargs[0]
	repl := fargs[1]
	for _, word := range wb.words {
		pre, subst, post := substPatternBytes(pat, repl, word)
		var sword []byte
		sword = append(sword, pre...)
		// subst == nil means the word did not match the pattern; it is
		// passed through unchanged (pre holds the whole word then).
		if subst != nil {
			sword = append(sword, subst...)
			sword = append(sword, post...)
		}
		w.writeWord(sword)
	}
	abuf.release()
	wb.release()
	stats.add("funcbody", "patsubst", t)
	return nil
}
+
// funcStrip implements $(strip text): leading/trailing whitespace is
// removed and runs of internal whitespace collapse to single spaces.
// The stripping falls out of word-splitting and re-joining.
type funcStrip struct{ fclosure }

func (f *funcStrip) Arity() int { return 1 }
func (f *funcStrip) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("strip", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	for _, word := range wb.words {
		w.writeWord(word)
	}
	wb.release()
	stats.add("funcbody", "strip", t)
	return nil
}
+
+type funcFindstring struct{ fclosure }
+
+func (f *funcFindstring) Arity() int { return 2 }
+func (f *funcFindstring) Eval(w evalWriter, ev *Evaluator) error {
+ err := assertArity("findstring", 2, len(f.args))
+ if err != nil {
+ return err
+ }
+ abuf := newEbuf()
+ fargs, err := ev.args(abuf, f.args[1:]...)
+ if err != nil {
+ return err
+ }
+ t := time.Now()
+ find := fargs[0]
+ text := fargs[1]
+ if bytes.Index(text, find) >= 0 {
+ w.Write(find)
+ }
+ abuf.release()
+ stats.add("funcbody", "findstring", t)
+ return nil
+}
+
// funcFilter implements $(filter patterns...,text): keeps each word of
// text that matches at least one %-pattern.
type funcFilter struct{ fclosure }

func (f *funcFilter) Arity() int { return 2 }
func (f *funcFilter) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("filter", 2, len(f.args))
	if err != nil {
		return err
	}
	patternsBuffer := newWbuf()
	err = f.args[1].Eval(patternsBuffer, ev)
	if err != nil {
		return err
	}
	textBuffer := newWbuf()
	err = f.args[2].Eval(textBuffer, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	// NOTE(review): a word matching several patterns is written once per
	// matching pattern; confirm this mirrors GNU make before changing.
	for _, text := range textBuffer.words {
		for _, pat := range patternsBuffer.words {
			if matchPatternBytes(pat, text) {
				w.writeWord(text)
			}
		}
	}
	patternsBuffer.release()
	textBuffer.release()
	stats.add("funcbody", "filter", t)
	return nil
}
+
// funcFilterOut implements $(filter-out patterns...,text): keeps each
// word of text that matches none of the %-patterns.
type funcFilterOut struct{ fclosure }

func (f *funcFilterOut) Arity() int { return 2 }
func (f *funcFilterOut) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("filter-out", 2, len(f.args))
	if err != nil {
		return err
	}
	patternsBuffer := newWbuf()
	err = f.args[1].Eval(patternsBuffer, ev)
	if err != nil {
		return err
	}
	textBuffer := newWbuf()
	err = f.args[2].Eval(textBuffer, ev)
	if err != nil {
		return err
	}
	t := time.Now()
Loop:
	// Any matching pattern disqualifies the word.
	for _, text := range textBuffer.words {
		for _, pat := range patternsBuffer.words {
			if matchPatternBytes(pat, text) {
				continue Loop
			}
		}
		w.writeWord(text)
	}
	patternsBuffer.release()
	textBuffer.release()
	stats.add("funcbody", "filter-out", t)
	// err is necessarily nil here.
	return err
}
+
// funcSort implements $(sort list): sorts the words lexically and, as in
// GNU make, removes duplicates.
type funcSort struct{ fclosure }

func (f *funcSort) Arity() int { return 1 }
func (f *funcSort) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("sort", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	var toks []string
	for _, tok := range wb.words {
		toks = append(toks, string(tok))
	}
	wb.release()
	sort.Strings(toks)

	// Remove duplicate words (adjacent after sorting).
	var prev string
	for _, tok := range toks {
		if prev == tok {
			continue
		}
		w.writeWordString(tok)
		prev = tok
	}
	stats.add("funcbody", "sort", t)
	return nil
}
+
// funcWord implements $(word n,text): the n-th word of text (1-based),
// or nothing when n exceeds the word count.
type funcWord struct{ fclosure }

func (f *funcWord) Arity() int { return 2 }
func (f *funcWord) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("word", 2, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	v := string(trimSpaceBytes(abuf.Bytes()))
	abuf.release()
	index, ok := numericValueForFunc(v)
	if !ok {
		return ev.errorf(`*** non-numeric first argument to "word" function: %q.`, v)
	}
	if index == 0 {
		return ev.errorf(`*** first argument to "word" function must be greater than 0.`)
	}
	wb := newWbuf()
	err = f.args[2].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	// An out-of-range index silently expands to nothing.
	if index-1 < len(wb.words) {
		w.writeWord(wb.words[index-1])
	}
	wb.release()
	stats.add("funcbody", "word", t)
	// err is necessarily nil here.
	return err
}
+
// funcWordlist implements $(wordlist s,e,text): the words of text from
// position s through e, 1-based inclusive.
type funcWordlist struct{ fclosure }

func (f *funcWordlist) Arity() int { return 3 }
func (f *funcWordlist) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("wordlist", 3, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	fargs, err := ev.args(abuf, f.args[1], f.args[2])
	if err != nil {
		return err
	}
	t := time.Now()
	v := string(trimSpaceBytes(fargs[0]))
	si, ok := numericValueForFunc(v)
	if !ok {
		// NOTE(review): abuf is not released on these early error
		// returns; the buffer is simply not returned to the pool.
		return ev.errorf(`*** non-numeric first argument to "wordlist" function: %q.`, v)
	}
	if si == 0 {
		return ev.errorf(`*** invalid first argument to "wordlist" function: %s`, f.args[1])
	}
	v = string(trimSpaceBytes(fargs[1]))
	ei, ok := numericValueForFunc(v)
	if !ok {
		return ev.errorf(`*** non-numeric second argument to "wordlist" function: %q.`, v)
	}
	abuf.release()

	wb := newWbuf()
	err = f.args[3].Eval(wb, ev)
	if err != nil {
		return err
	}
	// Emit words whose 1-based index i+1 lies in [si, ei].
	for i, word := range wb.words {
		if si <= i+1 && i+1 <= ei {
			w.writeWord(word)
		}
	}
	wb.release()
	stats.add("funcbody", "wordlist", t)
	return nil
}
+
// funcWords implements $(words text): the number of words in text.
type funcWords struct{ fclosure }

func (f *funcWords) Arity() int { return 1 }
func (f *funcWords) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("words", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	n := len(wb.words)
	wb.release()
	w.writeWordString(strconv.Itoa(n))
	stats.add("funcbody", "words", t)
	return nil
}
+
// funcFirstword implements $(firstword text): the first word, or
// nothing for an empty list.
type funcFirstword struct{ fclosure }

func (f *funcFirstword) Arity() int { return 1 }
func (f *funcFirstword) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("firstword", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	if len(wb.words) > 0 {
		w.writeWord(wb.words[0])
	}
	wb.release()
	stats.add("funcbody", "firstword", t)
	return nil
}
+
// funcLastword implements $(lastword text): the final word, or nothing
// for an empty list.
type funcLastword struct{ fclosure }

func (f *funcLastword) Arity() int { return 1 }
func (f *funcLastword) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("lastword", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	if len(wb.words) > 0 {
		w.writeWord(wb.words[len(wb.words)-1])
	}
	wb.release()
	stats.add("funcbody", "lastword", t)
	// err is necessarily nil here.
	return err
}
+
// https://www.gnu.org/software/make/manual/html_node/File-Name-Functions.html#File-Name-Functions

// funcJoin implements $(join list1,list2): pairwise concatenation of
// the two word lists; leftover words from the longer list pass through.
type funcJoin struct{ fclosure }

func (f *funcJoin) Arity() int { return 2 }
func (f *funcJoin) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("join", 2, len(f.args))
	if err != nil {
		return err
	}
	wb1 := newWbuf()
	err = f.args[1].Eval(wb1, ev)
	if err != nil {
		return err
	}
	wb2 := newWbuf()
	err = f.args[2].Eval(wb2, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	// Iterate to the longer list; a missing side contributes nothing.
	for i := 0; i < len(wb1.words) || i < len(wb2.words); i++ {
		var word []byte
		if i < len(wb1.words) {
			word = append(word, wb1.words[i]...)
		}
		if i < len(wb2.words) {
			word = append(word, wb2.words[i]...)
		}
		w.writeWord(word)
	}
	wb1.release()
	wb2.release()
	stats.add("funcbody", "join", t)
	return nil
}
+
// funcWildcard implements $(wildcard patterns...): glob expansion of
// each word.
type funcWildcard struct{ fclosure }

func (f *funcWildcard) Arity() int { return 1 }
func (f *funcWildcard) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("wildcard", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	te := traceEvent.begin("wildcard", tmpval(wb.Bytes()), traceEventMain)
	// Note GNU make does not delay the execution of $(wildcard) so we
	// do not need to check avoid_io here.
	t := time.Now()
	for _, word := range wb.words {
		pat := string(word)
		err = wildcard(w, pat)
		if err != nil {
			// NOTE(review): wb is not released and the trace event is
			// not ended on this path.
			return err
		}
	}
	wb.release()
	traceEvent.end(te)
	stats.add("funcbody", "wildcard", t)
	return nil
}
+
// funcDir implements $(dir names...): the directory part of each word,
// with a trailing separator (only "/" keeps no extra separator).
type funcDir struct{ fclosure }

func (f *funcDir) Arity() int { return 1 }
func (f *funcDir) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("dir", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	for _, word := range wb.words {
		name := filepath.Dir(string(word))
		if name == "/" {
			w.writeWordString(name)
			continue
		}
		w.writeWordString(name + string(filepath.Separator))
	}
	wb.release()
	stats.add("funcbody", "dir", t)
	return nil
}
+
// funcNotdir implements $(notdir names...): the non-directory part of
// each word; a bare separator becomes an empty word.
type funcNotdir struct{ fclosure }

func (f *funcNotdir) Arity() int { return 1 }
func (f *funcNotdir) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("notdir", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	for _, word := range wb.words {
		name := string(word)
		if name == string(filepath.Separator) {
			w.writeWord([]byte{}) // separator
			continue
		}
		w.writeWordString(filepath.Base(name))
	}
	wb.release()
	stats.add("funcbody", "notdir", t)
	return nil
}
+
// funcSuffix implements $(suffix names...): the file extension of each
// word that has one; extensionless words contribute nothing.
type funcSuffix struct{ fclosure }

func (f *funcSuffix) Arity() int { return 1 }
func (f *funcSuffix) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("suffix", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	for _, word := range wb.words {
		tok := string(word)
		e := filepath.Ext(tok)
		if len(e) > 0 {
			w.writeWordString(e)
		}
	}
	wb.release()
	stats.add("funcbody", "suffix", t)
	// err is necessarily nil here.
	return err
}
+
// funcBasename implements $(basename names...): each word with its file
// extension removed.
type funcBasename struct{ fclosure }

func (f *funcBasename) Arity() int { return 1 }
func (f *funcBasename) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("basename", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	for _, word := range wb.words {
		tok := string(word)
		e := stripExt(tok)
		w.writeWordString(e)
	}
	wb.release()
	stats.add("funcbody", "basename", t)
	return nil
}
+
// funcAddsuffix implements $(addsuffix suffix,names...): appends suffix
// to every word.
type funcAddsuffix struct{ fclosure }

func (f *funcAddsuffix) Arity() int { return 2 }
func (f *funcAddsuffix) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("addsuffix", 2, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[2].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	suf := abuf.Bytes()
	for _, word := range wb.words {
		var name []byte
		name = append(name, word...)
		name = append(name, suf...)
		w.writeWord(name)
	}
	wb.release()
	abuf.release()
	stats.add("funcbody", "addsuffix", t)
	// err is necessarily nil here.
	return err
}
+
// funcAddprefix implements $(addprefix prefix,names...): prepends prefix
// to every word.
type funcAddprefix struct{ fclosure }

func (f *funcAddprefix) Arity() int { return 2 }
func (f *funcAddprefix) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("addprefix", 2, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	pre := abuf.Bytes()
	wb := newWbuf()
	err = f.args[2].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	for _, word := range wb.words {
		var name []byte
		name = append(name, pre...)
		name = append(name, word...)
		w.writeWord(name)
	}
	wb.release()
	abuf.release()
	stats.add("funcbody", "addprefix", t)
	// err is necessarily nil here.
	return err
}
+
// funcRealpath implements $(realpath names...): each word resolved to an
// absolute path with symlinks evaluated. Words that fail to resolve are
// dropped (with a warning), as in GNU make.
type funcRealpath struct{ fclosure }

func (f *funcRealpath) Arity() int { return 1 }
func (f *funcRealpath) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("realpath", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	// In ninja-generation (avoidIO) mode, defer the filesystem access by
	// emitting an equivalent shell fragment instead.
	if ev.avoidIO {
		fmt.Fprintf(w, "$(realpath %s 2>/dev/null)", string(wb.Bytes()))
		ev.hasIO = true
		wb.release()
		return nil
	}

	t := time.Now()
	for _, word := range wb.words {
		name := string(word)
		name, err := filepath.Abs(name)
		if err != nil {
			glog.Warningf("abs %q: %v", name, err)
			continue
		}
		name, err = filepath.EvalSymlinks(name)
		if err != nil {
			glog.Warningf("realpath %q: %v", name, err)
			continue
		}
		w.writeWordString(name)
	}
	wb.release()
	stats.add("funcbody", "realpath", t)
	// err is necessarily nil here (the loop shadows it).
	return err
}
+
// funcAbspath implements $(abspath names...): each word made absolute
// (lexically; symlinks are not resolved). Failing words are dropped
// with a warning.
type funcAbspath struct{ fclosure }

func (f *funcAbspath) Arity() int { return 1 }
func (f *funcAbspath) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("abspath", 1, len(f.args))
	if err != nil {
		return err
	}
	wb := newWbuf()
	err = f.args[1].Eval(wb, ev)
	if err != nil {
		return err
	}
	t := time.Now()
	for _, word := range wb.words {
		name := string(word)
		name, err := filepath.Abs(name)
		if err != nil {
			glog.Warningf("abs %q: %v", name, err)
			continue
		}
		w.writeWordString(name)
	}
	wb.release()
	stats.add("funcbody", "abspath", t)
	return nil
}
+
// http://www.gnu.org/software/make/manual/make.html#Conditional-Functions
// funcIf implements $(if cond,then[,else]): evaluates `then` when cond
// expands non-empty, the optional `else` otherwise. The untaken branch
// is never evaluated.
type funcIf struct{ fclosure }

func (f *funcIf) Arity() int { return 3 }
func (f *funcIf) Eval(w evalWriter, ev *Evaluator) error {
	// Only cond and then are required; else is optional.
	err := assertArity("if", 2, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	if len(abuf.Bytes()) != 0 {
		abuf.release()
		return f.args[2].Eval(w, ev)
	}
	abuf.release()
	if len(f.args) > 3 {
		return f.args[3].Eval(w, ev)
	}
	return nil
}
+
+type funcAnd struct{ fclosure }
+
+func (f *funcAnd) Arity() int { return 0 }
+func (f *funcAnd) Eval(w evalWriter, ev *Evaluator) error {
+ err := assertArity("and", 0, len(f.args))
+ if err != nil {
+ return nil
+ }
+ abuf := newEbuf()
+ var cond []byte
+ for _, arg := range f.args[1:] {
+ abuf.Reset()
+ err = arg.Eval(abuf, ev)
+ if err != nil {
+ return err
+ }
+ cond = abuf.Bytes()
+ if len(cond) == 0 {
+ abuf.release()
+ return nil
+ }
+ }
+ w.Write(cond)
+ abuf.release()
+ return nil
+}
+
// funcOr implements $(or cond1[,cond2...]): conditions are evaluated in
// turn and the first non-empty expansion is the result; later
// conditions are then not evaluated (short-circuit).
type funcOr struct{ fclosure }

// Arity 0 means varargs.
func (f *funcOr) Arity() int { return 0 }
func (f *funcOr) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("or", 0, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	for _, arg := range f.args[1:] {
		abuf.Reset()
		err = arg.Eval(abuf, ev)
		if err != nil {
			return err
		}
		cond := abuf.Bytes()
		if len(cond) != 0 {
			w.Write(cond)
			abuf.release()
			return nil
		}
	}
	abuf.release()
	return nil
}
+
// http://www.gnu.org/software/make/manual/make.html#Shell-Function
// funcShell implements $(shell command): runs the command and expands
// to its stdout with newlines folded to spaces.
type funcShell struct{ fclosure }

func (f *funcShell) Arity() int { return 1 }

// A hack for Android build. We need to evaluate things like $((3+4))
// when we emit ninja file, because the result of such expressions
// will be passed to other make functions.
// TODO: Maybe we should modify Android's Makefile and remove this
// workaround. It would be also nice if we can detect things like
// this.
//
// hasNoIoInShellScript reports whether the script can safely be run at
// ninja-generation time: it is empty, or it is exactly an
// `echo $((...))` arithmetic expansion.
func hasNoIoInShellScript(s []byte) bool {
	if len(s) == 0 {
		return true
	}
	if !bytes.HasPrefix(s, []byte("echo $((")) || s[len(s)-1] != ')' {
		return false
	}
	glog.Infof("has no IO - evaluate now: %s", s)
	return true
}
+
// Eval runs the $(shell ...) command. In avoidIO (ninja generation)
// mode the command is re-emitted as `$(...)` for the shell to run
// later, unless it is IO-free; otherwise kati tries an in-process
// builtin emulation first and falls back to spawning $(SHELL) -c.
func (f *funcShell) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("shell", 1, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	if ev.avoidIO && !hasNoIoInShellScript(abuf.Bytes()) {
		// Defer execution: emit the command for the generated ninja
		// file's shell to run.
		te := traceEvent.begin("shell", tmpval(abuf.Bytes()), traceEventMain)
		ev.hasIO = true
		io.WriteString(w, "$(")
		w.Write(abuf.Bytes())
		writeByte(w, ')')
		traceEvent.end(te)
		abuf.release()
		return nil
	}
	arg := abuf.String()
	abuf.release()
	// Try the in-process shell-builtin emulation before spawning a shell.
	if bc, err := parseBuiltinCommand(arg); err != nil {
		glog.V(1).Infof("sh builtin: %v", err)
	} else {
		glog.Info("use sh builtin:", arg)
		glog.V(2).Infof("builtin command: %#v", bc)
		te := traceEvent.begin("sh-builtin", literal(arg), traceEventMain)
		bc.run(w)
		traceEvent.end(te)
		return nil
	}

	shellVar, err := ev.EvaluateVar("SHELL")
	if err != nil {
		return err
	}
	cmdline := []string{shellVar, "-c", arg}
	if glog.V(1) {
		glog.Infof("shell %q", cmdline)
	}
	cmd := exec.Cmd{
		Path:   cmdline[0],
		Args:   cmdline,
		Stderr: os.Stderr,
	}
	te := traceEvent.begin("shell", literal(arg), traceEventMain)
	out, err := cmd.Output()
	shellStats.add(time.Since(te.t))
	// As in GNU make, a failing command is not fatal; whatever output it
	// produced is still used.
	if err != nil {
		glog.Warningf("$(shell %q) failed: %q", arg, err)
	}
	w.Write(formatCommandOutput(out))
	traceEvent.end(te)
	return nil
}
+
+func (f *funcShell) Compact() Value {
+ if len(f.args)-1 < 1 {
+ return f
+ }
+ if !UseShellBuiltins {
+ return f
+ }
+
+ var exp expr
+ switch v := f.args[1].(type) {
+ case expr:
+ exp = v
+ default:
+ exp = expr{v}
+ }
+ if UseShellBuiltins {
+ // hack for android
+ for _, sb := range shBuiltins {
+ if v, ok := matchExpr(exp, sb.pattern); ok {
+ glog.Infof("shell compact apply %s for %s", sb.name, exp)
+ return sb.compact(f, v)
+ }
+ }
+ glog.V(1).Infof("shell compact no match: %s", exp)
+ }
+ return f
+}
+
+// https://www.gnu.org/software/make/manual/html_node/Call-Function.html#Call-Function
+type funcCall struct{ fclosure }
+
+func (f *funcCall) Arity() int { return 0 }
+
+func (f *funcCall) Eval(w evalWriter, ev *Evaluator) error {
+ abuf := newEbuf()
+ fargs, err := ev.args(abuf, f.args[1:]...)
+ if err != nil {
+ return err
+ }
+ varname := fargs[0]
+ variable := string(varname)
+ te := traceEvent.begin("call", literal(variable), traceEventMain)
+ if glog.V(1) {
+ glog.Infof("call %q variable %q", f.args[1], variable)
+ }
+ v := ev.LookupVar(variable)
+ // Evalualte all arguments first before we modify the table.
+ // An omitted argument should be blank, even if it's nested inside
+ // another call statement that did have that argument passed.
+ // see testcases/nested_call.mk
+ arglen := len(ev.paramVars)
+ if arglen == 0 {
+ arglen++
+ }
+ if arglen < len(fargs[1:])+1 {
+ arglen = len(fargs[1:]) + 1
+ }
+ args := make([]tmpval, arglen)
+ // $0 is variable.
+ args[0] = tmpval(varname)
+ // TODO(ukai): If variable is the name of a built-in function,
+ // the built-in function is always invoked (even if a make variable
+ // by that name also exists).
+
+ for i, arg := range fargs[1:] {
+ // f.args[2]=>args[1] will be $1.
+ args[i+1] = tmpval(arg)
+ if glog.V(1) {
+ glog.Infof("call $%d: %q=>%q", i+1, arg, fargs[i+1])
+ }
+ }
+ oldParams := ev.paramVars
+ ev.paramVars = args
+
+ var buf bytes.Buffer
+ if glog.V(1) {
+ w = &ssvWriter{Writer: io.MultiWriter(w, &buf)}
+ }
+ err = v.Eval(w, ev)
+ if err != nil {
+ return err
+ }
+ ev.paramVars = oldParams
+ traceEvent.end(te)
+ if glog.V(1) {
+ glog.Infof("call %q variable %q return %q", f.args[1], variable, buf.Bytes())
+ }
+ abuf.release()
+ return nil
+}
+
// http://www.gnu.org/software/make/manual/make.html#Value-Function
// funcValue implements $(value var): the unexpanded definition of var.
type funcValue struct{ fclosure }

func (f *funcValue) Arity() int { return 1 }
func (f *funcValue) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("value", 1, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	v := ev.LookupVar(abuf.String())
	abuf.release()
	// Write the variable's raw string form without expanding it.
	io.WriteString(w, v.String())
	return nil
}
+
// http://www.gnu.org/software/make/manual/make.html#Eval-Function
// funcEval implements $(eval text): the expanded text is parsed as
// makefile syntax and evaluated in the current context.
type funcEval struct{ fclosure }

func (f *funcEval) Arity() int { return 1 }
func (f *funcEval) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("eval", 1, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	s := abuf.Bytes()
	glog.V(1).Infof("eval %v=>%q at %s", f.args[1], s, ev.srcpos)
	mk, err := parseMakefileBytes(trimSpaceBytes(s), ev.srcpos)
	if err != nil {
		// NOTE(review): abuf is not released on the error paths.
		return ev.errorf("%v", err)
	}

	for _, stmt := range mk.stmts {
		err = ev.eval(stmt)
		if err != nil {
			return err
		}
	}
	abuf.release()
	return nil
}
+
// Compact statically rewrites $(eval ...) into cheaper forms where the
// argument's shape allows it: a literal assignment becomes a
// funcEvalAssign, and an all-comment/blank body becomes a funcNop.
// Anything else is left for the generic Eval path above.
func (f *funcEval) Compact() Value {
	if len(f.args)-1 < 1 {
		return f
	}
	switch arg := f.args[1].(type) {
	case literal, tmpval:
		// Fully literal argument: handled after the switch.
	case expr:
		if len(arg) == 1 {
			return f
		}
		// Mixed expression: recognize `lhs op $(...)...` where the
		// leading literal piece contains the assignment operator.
		switch prefix := arg[0].(type) {
		case literal, tmpval:
			lhs, op, rhsprefix, ok := parseAssignLiteral(prefix.String())
			if ok {
				// $(eval foo = $(bar))
				var rhs expr
				if rhsprefix != literal("") {
					rhs = append(rhs, rhsprefix)
				}
				rhs = append(rhs, arg[1:]...)
				glog.V(1).Infof("eval assign %#v => lhs:%q op:%q rhs:%#v", f, lhs, op, rhs)
				return &funcEvalAssign{
					lhs: lhs,
					op:  op,
					rhs: compactExpr(rhs),
				}
			}
		}
		// TODO(ukai): eval -> varassign. e.g $(eval $(foo) := $(x)).
		return f
	default:
		return f
	}
	// Literal argument: strip comments, then detect nop or assignment.
	arg := f.args[1].String()
	arg = stripComment(arg)
	if arg == "" || strings.TrimSpace(arg) == "" {
		return &funcNop{expr: f.String()}
	}
	f.args[1] = literal(arg)
	lhs, op, rhs, ok := parseAssignLiteral(f.args[1].String())
	if ok {
		return &funcEvalAssign{
			lhs: lhs,
			op:  op,
			rhs: rhs,
		}
	}
	return f
}
+
// stripComment removes every '#' comment from arg: each comment runs
// from the '#' through (and including) the following newline, or to the
// end of the string when there is no newline.
func stripComment(arg string) string {
	for {
		i := strings.Index(arg, "#")
		if i < 0 {
			return arg
		}
		eol := strings.Index(arg[i:], "\n")
		if eol < 0 {
			return arg[:i]
		}
		// Fix: eol is relative to i, so the cut must start at i+eol+1.
		// The original used arg[eol+1:], which kept comment text (and
		// could duplicate earlier text, growing the string) whenever
		// the comment did not begin at offset <= newline offset.
		arg = arg[:i] + arg[i+eol+1:]
	}
}
+
// funcNop is a compacted $(eval ...) whose body was empty or
// comment-only: evaluating it does nothing. expr preserves the original
// source text for String().
type funcNop struct{ expr string }

func (f *funcNop) String() string                   { return f.expr }
func (f *funcNop) Eval(evalWriter, *Evaluator) error { return nil }
func (f *funcNop) serialize() serializableVar {
	return serializableVar{
		Type: "funcNop",
		V:    f.expr,
	}
}
func (f *funcNop) dump(d *dumpbuf) {
	d.Byte(valueTypeNop)
}
+
// parseAssignLiteral splits a literal assignment "lhs op rhs" into its
// parts; op is one of "=", ":=", "+=", "?=". ok is false when s has no
// '=' or when the lhs contains ':' or '$' (a target-specific variable,
// or a name that still needs evaluation). rhs keeps trailing
// whitespace; only its leading spaces/tabs are trimmed.
func parseAssignLiteral(s string) (lhs, op string, rhs Value, ok bool) {
	eq := strings.Index(s, "=")
	if eq < 0 {
		return "", "", nil, false
	}
	// TODO(ukai): factor out parse assign?
	lhs = s[:eq]
	op = s[eq : eq+1]
	// A ':', '+' or '?' immediately before '=' belongs to the operator.
	if eq >= 1 && (s[eq-1] == ':' || s[eq-1] == '+' || s[eq-1] == '?') {
		lhs = s[:eq-1]
		op = s[eq-1 : eq+1]
	}
	lhs = strings.TrimSpace(lhs)
	if strings.IndexAny(lhs, ":$") >= 0 {
		// target specific var, or need eval.
		return "", "", nil, false
	}
	r := strings.TrimLeft(s[eq+1:], " \t")
	rhs = literal(r)
	return lhs, op, rhs, true
}
+
// funcEvalAssign is a compacted $(eval lhs op rhs): a plain variable
// assignment that can be executed without re-parsing makefile text.
type funcEvalAssign struct {
	lhs string
	op  string
	rhs Value
}

// String reconstructs an $(eval ...) form for diagnostics.
func (f *funcEvalAssign) String() string {
	return fmt.Sprintf("$(eval %s %s %s)", f.lhs, f.op, f.rhs)
}
+
// Eval performs the assignment: the rhs is expanded (or kept recursive)
// according to the operator, and the result is stored in ev.outVars.
func (f *funcEvalAssign) Eval(w evalWriter, ev *Evaluator) error {
	var abuf evalBuffer
	abuf.resetSep()
	err := f.rhs.Eval(&abuf, ev)
	if err != nil {
		return err
	}
	rhs := trimLeftSpaceBytes(abuf.Bytes())
	glog.V(1).Infof("evalAssign: lhs=%q rhs=%s %q", f.lhs, f.rhs, rhs)
	var rvalue Var
	switch f.op {
	case ":=":
		// Simply-expanded: expand rhs now and store the result.
		// TODO(ukai): compute parsed expr in Compact when f.rhs is
		// literal? e.g. literal("$(foo)") => varref{literal("foo")}.
		exp, _, err := parseExpr(rhs, nil, parseOp{})
		if err != nil {
			return ev.errorf("eval assign error: %q: %v", f.String(), err)
		}
		vbuf := newEbuf()
		err = exp.Eval(vbuf, ev)
		if err != nil {
			return err
		}
		rvalue = &simpleVar{value: []string{vbuf.String()}, origin: "file"}
		vbuf.release()
	case "=":
		// Recursively-expanded: store rhs unexpanded.
		rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"}
	case "+=":
		// Append to an existing definition, or define recursively.
		prev := ev.LookupVar(f.lhs)
		if prev.IsDefined() {
			rvalue, err = prev.Append(ev, string(rhs))
			if err != nil {
				return err
			}
		} else {
			rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"}
		}
	case "?=":
		// Conditional: keep an existing definition untouched.
		prev := ev.LookupVar(f.lhs)
		if prev.IsDefined() {
			return nil
		}
		rvalue = &recursiveVar{expr: tmpval(rhs), origin: "file"}
	}
	if glog.V(1) {
		glog.Infof("Eval ASSIGN: %s=%q (flavor:%q)", f.lhs, rvalue, rvalue.Flavor())
	}
	ev.outVars.Assign(f.lhs, rvalue)
	return nil
}
+
// serialize encodes the assignment as lhs, op, and serialized rhs children.
func (f *funcEvalAssign) serialize() serializableVar {
	return serializableVar{
		Type: "funcEvalAssign",
		Children: []serializableVar{
			serializableVar{V: f.lhs},
			serializableVar{V: f.op},
			f.rhs.serialize(),
		},
	}
}

// dump writes the binary form: assign tag, lhs, op, then rhs.
func (f *funcEvalAssign) dump(d *dumpbuf) {
	d.Byte(valueTypeAssign)
	d.Str(f.lhs)
	d.Str(f.op)
	f.rhs.dump(d)
}
+
// http://www.gnu.org/software/make/manual/make.html#Origin-Function
// funcOrigin implements $(origin var): where var was defined
// ("file", "undefined", etc.).
type funcOrigin struct{ fclosure }

func (f *funcOrigin) Arity() int { return 1 }
func (f *funcOrigin) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("origin", 1, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	v := ev.LookupVar(abuf.String())
	abuf.release()
	io.WriteString(w, v.Origin())
	return nil
}
+
// https://www.gnu.org/software/make/manual/html_node/Flavor-Function.html#Flavor-Function
// funcFlavor implements $(flavor var): "simple", "recursive", or
// "undefined".
type funcFlavor struct{ fclosure }

func (f *funcFlavor) Arity() int { return 1 }
func (f *funcFlavor) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("flavor", 1, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	v := ev.LookupVar(abuf.String())
	abuf.release()
	io.WriteString(w, v.Flavor())
	return nil
}
+
// http://www.gnu.org/software/make/manual/make.html#Make-Control-Functions
// funcInfo implements $(info text): prints text to stdout. In avoidIO
// (ninja generation) mode the print is deferred as a shell echo.
type funcInfo struct{ fclosure }

func (f *funcInfo) Arity() int { return 1 }
func (f *funcInfo) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("info", 1, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	if ev.avoidIO {
		ev.delayedOutputs = append(ev.delayedOutputs,
			fmt.Sprintf("echo %q", abuf.String()))
		ev.hasIO = true
		abuf.release()
		return nil
	}
	fmt.Printf("%s\n", abuf.String())
	abuf.release()
	return nil
}
+
// funcWarning implements $(warning text): prints "srcpos: text" and
// continues. In avoidIO mode the print is deferred as a shell echo to
// stderr.
type funcWarning struct{ fclosure }

func (f *funcWarning) Arity() int { return 1 }
func (f *funcWarning) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("warning", 1, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	if ev.avoidIO {
		ev.delayedOutputs = append(ev.delayedOutputs,
			fmt.Sprintf("echo '%s: %s' 1>&2", ev.srcpos, abuf.String()))
		ev.hasIO = true
		abuf.release()
		return nil
	}
	fmt.Printf("%s: %s\n", ev.srcpos, abuf.String())
	abuf.release()
	return nil
}
+
// funcError implements $(error text): evaluation fails with
// "*** text.". In avoidIO mode the failure is deferred as a shell
// echo-and-false.
type funcError struct{ fclosure }

func (f *funcError) Arity() int { return 1 }
func (f *funcError) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("error", 1, len(f.args))
	if err != nil {
		return err
	}
	var abuf evalBuffer
	abuf.resetSep()
	err = f.args[1].Eval(&abuf, ev)
	if err != nil {
		return err
	}
	if ev.avoidIO {
		ev.delayedOutputs = append(ev.delayedOutputs,
			fmt.Sprintf("echo '%s: *** %s.' 1>&2 && false", ev.srcpos, abuf.String()))
		ev.hasIO = true
		abuf.release()
		return nil
	}
	return ev.errorf("*** %s.", abuf.String())
}
+
// http://www.gnu.org/software/make/manual/make.html#Foreach-Function
// funcForeach implements $(foreach var,list,text): text is evaluated
// once per word of list with var bound to that word; results are joined
// with single spaces.
type funcForeach struct{ fclosure }

func (f *funcForeach) Arity() int { return 3 }

func (f *funcForeach) Eval(w evalWriter, ev *Evaluator) error {
	err := assertArity("foreach", 3, len(f.args))
	if err != nil {
		return err
	}
	abuf := newEbuf()
	err = f.args[1].Eval(abuf, ev)
	if err != nil {
		return err
	}
	varname := string(abuf.Bytes())
	abuf.release()
	wb := newWbuf()
	err = f.args[2].Eval(wb, ev)
	if err != nil {
		return err
	}
	text := f.args[3]
	// Remember the previous binding of the loop variable so it can be
	// restored afterward.
	ov := ev.LookupVar(varname)
	space := false
	for _, word := range wb.words {
		ev.outVars.Assign(varname, &automaticVar{value: word})
		if space {
			writeByte(w, ' ')
		}
		err = text.Eval(w, ev)
		if err != nil {
			return err
		}
		space = true
	}
	wb.release()
	// Restore the shadowed variable, but only if the loop body did not
	// itself reassign it to something non-automatic.
	av := ev.LookupVar(varname)
	if _, ok := av.(*automaticVar); ok {
		ev.outVars.Assign(varname, ov)
	}
	return nil
}
diff --git a/golang/kati/func_test.go b/golang/kati/func_test.go
new file mode 100644
index 0000000..b56291e
--- /dev/null
+++ b/golang/kati/func_test.go
@@ -0,0 +1,76 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import "testing"
+
+// BenchmarkFuncStrip measures evaluating $(strip a b c ).
+func BenchmarkFuncStrip(b *testing.B) {
+	strip := &funcStrip{
+		fclosure: fclosure{
+			args: []Value{
+				literal("(strip"),
+				literal("a b c "),
+			},
+		},
+	}
+	ev := NewEvaluator(make(map[string]Var))
+	var buf evalBuffer
+	b.ReportAllocs()
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		buf.Reset()
+		strip.Eval(&buf, ev)
+	}
+}
+
+// BenchmarkFuncSort measures evaluating $(sort foo bar lose).
+func BenchmarkFuncSort(b *testing.B) {
+	sort := &funcSort{
+		fclosure: fclosure{
+			args: []Value{
+				literal("(sort"),
+				literal("foo bar lose"),
+			},
+		},
+	}
+	ev := NewEvaluator(make(map[string]Var))
+	var buf evalBuffer
+	b.ReportAllocs()
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		buf.Reset()
+		sort.Eval(&buf, ev)
+	}
+}
+
+// BenchmarkFuncPatsubst measures evaluating
+// $(patsubst %.java,%.class,foo.jar bar.java baz.h).
+func BenchmarkFuncPatsubst(b *testing.B) {
+	patsubst := &funcPatsubst{
+		fclosure: fclosure{
+			args: []Value{
+				literal("(patsubst"),
+				literal("%.java"),
+				literal("%.class"),
+				literal("foo.jar bar.java baz.h"),
+			},
+		},
+	}
+	ev := NewEvaluator(make(map[string]Var))
+	var buf evalBuffer
+	b.ReportAllocs()
+	b.ResetTimer()
+	for i := 0; i < b.N; i++ {
+		buf.Reset()
+		patsubst.Eval(&buf, ev)
+	}
+}
diff --git a/golang/kati/log.go b/golang/kati/log.go
new file mode 100644
index 0000000..4b3098f
--- /dev/null
+++ b/golang/kati/log.go
@@ -0,0 +1,39 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+
+ "github.com/golang/glog"
+)
+
+// logStats logs a printf-style stats message via glog, but only when the
+// StatsFlag option is enabled.
+func logStats(f string, a ...interface{}) {
+	// TODO(ukai): vmodule?
+	if !StatsFlag {
+		return
+	}
+	glog.Infof(f, a...)
+}
+
+// warn prints "<srcpos>: warning: <message>" to stdout.
+// Note that f is spliced into a new format string, so its verbs are
+// expanded against a by the second Printf.
+func warn(loc srcpos, f string, a ...interface{}) {
+	f = fmt.Sprintf("%s: warning: %s\n", loc, f)
+	fmt.Printf(f, a...)
+}
+
+// warnNoPrefix prints "<srcpos>: <message>" to stdout, like warn but
+// without the "warning:" prefix.
+func warnNoPrefix(loc srcpos, f string, a ...interface{}) {
+	f = fmt.Sprintf("%s: %s\n", loc, f)
+	fmt.Printf(f, a...)
+}
diff --git a/golang/kati/ninja.go b/golang/kati/ninja.go
new file mode 100644
index 0000000..f90ff24
--- /dev/null
+++ b/golang/kati/ninja.go
@@ -0,0 +1,777 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "fmt"
+ "os"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "sort"
+ "strings"
+ "time"
+
+ "github.com/golang/glog"
+)
+
+// nodeState records how far ninja generation has progressed for a given
+// output target; see NinjaGenerator.emitNode.
+type nodeState int
+
+const (
+	nodeInit    nodeState = iota // not visited
+	nodeVisit                    // visited
+	nodeFile                     // visited & file exists
+	nodeAlias                    // visited & alias for other target
+	nodeMissing                  // visited & no target for this output
+	nodeBuild                    // visited & build emitted
+)
+
+func (s nodeState) String() string {
+ switch s {
+ case nodeInit:
+ return "node-init"
+ case nodeVisit:
+ return "node-visit"
+ case nodeFile:
+ return "node-file"
+ case nodeAlias:
+ return "node-alias"
+ case nodeMissing:
+ return "node-missing"
+ case nodeBuild:
+ return "node-build"
+ default:
+ return fmt.Sprintf("node-unknown[%d]", int(s))
+ }
+}
+
+// NinjaGenerator generates ninja build files from DepGraph.
+type NinjaGenerator struct {
+	// Args is original arguments to generate the ninja file.
+	Args []string
+	// Suffix is suffix for generated files.
+	Suffix string
+	// GomaDir is goma directory. If empty, goma will not be used.
+	GomaDir string
+	// DetectAndroidEcho detects echo as description.
+	DetectAndroidEcho bool
+
+	// f is the ninja file currently being written.
+	f *os.File
+	// nodes are the dependency-graph nodes to emit.
+	nodes []*DepNode
+	// exports maps variable names to whether the generated shell wrapper
+	// should export (true) or unset (false) them.
+	exports map[string]bool
+
+	ctx *execContext
+
+	// ruleID numbers the generated rules ("rule0", "rule1", ...).
+	ruleID int
+	// done tracks the emit state for each output; see nodeState.
+	done map[string]nodeState
+}
+
+// init prepares the generator's state (nodes, exports, exec context and
+// the done map) from the dependency graph g.
+func (n *NinjaGenerator) init(g *DepGraph) {
+	g.resolveVPATH()
+	n.nodes = g.nodes
+	n.exports = g.exports
+	n.ctx = newExecContext(g.vars, g.vpaths, true)
+	n.done = make(map[string]nodeState)
+}
+
+func getDepfileImpl(ss string) (string, error) {
+ tss := ss + " "
+ if (!strings.Contains(tss, " -MD ") && !strings.Contains(tss, " -MMD ")) || !strings.Contains(tss, " -c ") {
+ return "", nil
+ }
+
+ mfIndex := strings.Index(ss, " -MF ")
+ if mfIndex >= 0 {
+ mf := trimLeftSpace(ss[mfIndex+4:])
+ if strings.Index(mf, " -MF ") >= 0 {
+ return "", fmt.Errorf("Multiple output file candidates in %s", ss)
+ }
+ mfEndIndex := strings.IndexAny(mf, " \t\n")
+ if mfEndIndex >= 0 {
+ mf = mf[:mfEndIndex]
+ }
+
+ return mf, nil
+ }
+
+ outIndex := strings.Index(ss, " -o ")
+ if outIndex < 0 {
+ return "", fmt.Errorf("Cannot find the depfile in %s", ss)
+ }
+ out := trimLeftSpace(ss[outIndex+4:])
+ if strings.Index(out, " -o ") >= 0 {
+ return "", fmt.Errorf("Multiple output file candidates in %s", ss)
+ }
+ outEndIndex := strings.IndexAny(out, " \t\n")
+ if outEndIndex >= 0 {
+ out = out[:outEndIndex]
+ }
+ return stripExt(out) + ".d", nil
+}
+
+// getDepfile gets depfile from cmdline, and returns cmdline and depfile.
+// The command line may be rewritten so the depfile survives for ninja's
+// "deps" processing: automake's "mv" becomes "cp", Android's .P rewrite
+// keeps the .P file, and in the generic case a "cp <depfile> <depfile>.tmp"
+// is appended and the .tmp copy is reported instead.
+func getDepfile(cmdline string) (string, string, error) {
+	// A hack for Android - llvm-rs-cc seems not to emit a dep file.
+	if strings.Contains(cmdline, "bin/llvm-rs-cc ") {
+		return cmdline, "", nil
+	}
+
+	depfile, err := getDepfileImpl(cmdline)
+	if depfile == "" || err != nil {
+		return cmdline, depfile, err
+	}
+
+	// A hack for Makefiles generated by automake.
+	mvCmd := "(mv -f " + depfile + " "
+	if i := strings.LastIndex(cmdline, mvCmd); i >= 0 {
+		rest := cmdline[i+len(mvCmd):]
+		ei := strings.IndexByte(rest, ')')
+		if ei < 0 {
+			return cmdline, "", fmt.Errorf("unbalanced parenthes? %s", cmdline)
+		}
+		// Replace the mv with a cp so the original depfile stays around.
+		cmdline = cmdline[:i] + "(cp -f " + depfile + " " + rest
+		return cmdline, depfile, nil
+	}
+
+	// A hack for Android to get .P files instead of .d.
+	p := stripExt(depfile) + ".P"
+	if strings.Contains(cmdline, p) {
+		rmfCmd := "; rm -f " + depfile
+		ncmdline := strings.Replace(cmdline, rmfCmd, "", 1)
+		if ncmdline == cmdline {
+			return cmdline, "", fmt.Errorf("cannot find removal of .d file: %s", cmdline)
+		}
+		return ncmdline, p, nil
+	}
+
+	// A hack for Android. For .s files, GCC does not use
+	// C preprocessor, so it ignores -MF flag.
+	as := "/" + stripExt(filepath.Base(depfile)) + ".s"
+	if strings.Contains(cmdline, as) {
+		return cmdline, "", nil
+	}
+
+	// Generic case: copy the depfile aside and let ninja consume the copy.
+	cmdline += fmt.Sprintf(" && cp %s %s.tmp", depfile, depfile)
+	depfile += ".tmp"
+	return cmdline, depfile, nil
+}
+
// trimTailingSlash removes a single trailing backslash from s
// (multiline_arg.mk), while preserving an escaped backslash pair at the
// end (escaped_backslash.mk) and strings too short to disambiguate.
func trimTailingSlash(s string) string {
	if !strings.HasSuffix(s, "\\") {
		return s
	}
	if len(s) <= 2 || strings.HasSuffix(s, "\\\\") {
		// too short to tell, or an escaped backslash: keep as is.
		return s
	}
	return s[:len(s)-1]
}
+
+// stripShellComment removes "#" comments from the shell snippet s. A "#"
+// inside single/double quotes is kept; a comment inside a command
+// substitution ($(...) or `...`) is stripped only up to the end of that
+// substitution, while a top-level comment is stripped to the end of line.
+func stripShellComment(s string) string {
+	if strings.IndexByte(s, '#') < 0 {
+		// Fast path.
+		return s
+	}
+	// set space as an initial value so the leading comment will be
+	// stripped out.
+	lastch := rune(' ')
+	var escape bool
+	var quote rune
+	// skip != 0 means we are discarding a comment until this rune is seen.
+	var skip rune
+	// cmdsubst is a stack of closers (')' or '`') for open substitutions.
+	var cmdsubst []rune
+	var buf bytes.Buffer
+Loop:
+	for _, c := range s {
+		if skip != 0 {
+			if skip != c {
+				continue Loop
+			}
+			// The comment ended at a substitution closer; pop it.
+			if len(cmdsubst) > 0 && cmdsubst[len(cmdsubst)-1] == skip {
+				cmdsubst = cmdsubst[:len(cmdsubst)-1]
+			}
+			skip = 0
+		}
+		if quote != 0 {
+			if quote == c && (quote == '\'' || !escape) {
+				quote = 0
+			}
+		} else if !escape {
+			if c == '#' && isWhitespace(lastch) {
+				if len(cmdsubst) == 0 {
+					// strip comment until the end of line.
+					skip = '\n'
+					continue Loop
+				}
+				// strip comment until the end of command subst.
+				skip = cmdsubst[len(cmdsubst)-1]
+				continue Loop
+			} else if c == '\'' || c == '"' {
+				quote = c
+			} else if lastch == '$' && c == '(' {
+				cmdsubst = append(cmdsubst, ')')
+			} else if c == '`' {
+				cmdsubst = append(cmdsubst, '`')
+			}
+		}
+		if escape {
+			escape = false
+		} else if c == '\\' {
+			escape = true
+		} else {
+			escape = false
+		}
+		lastch = c
+		buf.WriteRune(c)
+	}
+	return buf.String()
+}
+
// ccRE matches Android prebuilt gcc/clang compile (-c) command lines.
var ccRE = regexp.MustCompile(`^prebuilts/(gcc|clang)/.*(gcc|g\+\+|clang|clang\+\+) .* ?-c `)

// gomaCmdForAndroidCompileCmd reports whether cmd is an Android compiler
// invocation that goma can handle, skipping over a leading ccache wrapper.
// It returns the command with any ccache prefix removed.
func gomaCmdForAndroidCompileCmd(cmd string) (string, bool) {
	sp := strings.Index(cmd, " ")
	if sp < 0 {
		// No arguments at all; cannot be a compile command.
		return cmd, false
	}
	if strings.HasSuffix(cmd[:sp], "ccache") {
		// Peel off the ccache wrapper and inspect the real compiler.
		return gomaCmdForAndroidCompileCmd(cmd[sp+1:])
	}
	return cmd, ccRE.MatchString(cmd)
}
+
+func descriptionFromCmd(cmd string) (string, bool) {
+ if !strings.HasPrefix(cmd, "echo") || !isWhitespace(rune(cmd[4])) {
+ return "", false
+ }
+ echoarg := cmd[5:]
+
+ // strip outer quotes, and fail if it is not a single echo command.
+ var buf bytes.Buffer
+ var escape bool
+ var quote rune
+ for _, c := range echoarg {
+ if escape {
+ escape = false
+ buf.WriteRune(c)
+ continue
+ }
+ if c == '\\' {
+ escape = true
+ buf.WriteRune(c)
+ continue
+ }
+ if quote != 0 {
+ if c == quote {
+ quote = 0
+ continue
+ }
+ buf.WriteRune(c)
+ continue
+ }
+ switch c {
+ case '\'', '"', '`':
+ quote = c
+ case '<', '>', '&', '|', ';':
+ return "", false
+ default:
+ buf.WriteRune(c)
+ }
+ }
+ return buf.String(), true
+}
+
+// genShellScript flattens the recipe commands in runners into a single
+// shell command line for a ninja rule. It returns the command, a build
+// description (one extracted from an echo when DetectAndroidEcho is set,
+// otherwise "build $out"), and whether the rule must run in the local
+// (non-goma) pool.
+func (n *NinjaGenerator) genShellScript(runners []runner) (cmd string, desc string, useLocalPool bool) {
+	const defaultDesc = "build $out"
+	var useGomacc bool
+	var buf bytes.Buffer
+	for i, r := range runners {
+		if i > 0 {
+			// Chain with ";" after a "-" (ignore-error) command, "&&" otherwise.
+			if runners[i-1].ignoreError {
+				buf.WriteString(" ; ")
+			} else {
+				buf.WriteString(" && ")
+			}
+		}
+		// NOTE: this cmd shadows the named return; the final command is
+		// buf.String().
+		cmd := trimTailingSlash(r.cmd)
+		cmd = stripShellComment(cmd)
+		cmd = trimLeftSpace(cmd)
+		cmd = strings.Replace(cmd, "\\\n\t", "", -1)
+		cmd = strings.Replace(cmd, "\\\n", "", -1)
+		cmd = strings.TrimRight(cmd, " \t\n;")
+		cmd = escapeNinja(cmd)
+		if cmd == "" {
+			cmd = "true"
+		}
+		glog.V(2).Infof("cmd %q=>%q", r.cmd, cmd)
+		if n.GomaDir != "" {
+			rcmd, ok := gomaCmdForAndroidCompileCmd(cmd)
+			if ok {
+				cmd = fmt.Sprintf("%s/gomacc %s", n.GomaDir, rcmd)
+				useGomacc = true
+			}
+		}
+		if n.DetectAndroidEcho && desc == "" {
+			d, ok := descriptionFromCmd(cmd)
+			if ok {
+				desc = d
+				cmd = "true"
+			}
+		}
+		// cmd is never empty here (the "true" fallback above), so cmd[0]
+		// is safe to read.
+		needsSubShell := i > 0 || len(runners) > 1
+		if cmd[0] == '(' {
+			needsSubShell = false
+		}
+
+		if needsSubShell {
+			buf.WriteByte('(')
+		}
+		buf.WriteString(cmd)
+		if i == len(runners)-1 && r.ignoreError {
+			buf.WriteString(" ; true")
+		}
+		if needsSubShell {
+			buf.WriteByte(')')
+		}
+	}
+	if desc == "" {
+		desc = defaultDesc
+	}
+	return buf.String(), desc, n.GomaDir != "" && !useGomacc
+}
+
+func (n *NinjaGenerator) genRuleName() string {
+ ruleName := fmt.Sprintf("rule%d", n.ruleID)
+ n.ruleID++
+ return ruleName
+}
+
+// emitBuild writes a ninja "build <output>: <rule> <inputs> || <orderOnlys>"
+// statement. Note: it does not write a trailing newline; callers terminate
+// the line themselves.
+func (n *NinjaGenerator) emitBuild(output, rule, inputs, orderOnlys string) {
+	fmt.Fprintf(n.f, "build %s: %s", escapeBuildTarget(output), rule)
+	if inputs != "" {
+		fmt.Fprintf(n.f, " %s", inputs)
+	}
+	if orderOnlys != "" {
+		fmt.Fprintf(n.f, " || %s", orderOnlys)
+	}
+}
+
// escapeBuildTarget escapes '$', ':' and ' ' with '$' for use as a ninja
// build target. A make-level backslash escape before one of those
// characters is dropped ("\ " unescapes to "$ "); other backslashes are
// passed through.
// TODO(ukai): which char should unescape, which should not here?
func escapeBuildTarget(s string) string {
	if strings.IndexAny(s, "$: \\") < 0 {
		// nothing to escape.
		return s
	}
	var out bytes.Buffer
	pending := rune(0) // backslash waiting to be emitted
	for _, r := range s {
		if r == '\\' {
			// note: consecutive backslashes collapse into one.
			pending = r
			continue
		}
		if r == '$' || r == ':' || r == ' ' {
			// drop a pending backslash and escape for ninja.
			pending = 0
			out.WriteByte('$')
		}
		if pending != 0 {
			out.WriteRune(pending)
			pending = 0
		}
		out.WriteRune(r)
	}
	if pending != 0 {
		out.WriteRune(pending)
	}
	return out.String()
}
+
+func (n *NinjaGenerator) dependency(node *DepNode) (string, string) {
+ var deps []string
+ seen := make(map[string]bool)
+ for _, d := range node.Deps {
+ t := escapeBuildTarget(d.Output)
+ if seen[t] {
+ continue
+ }
+ deps = append(deps, t)
+ seen[t] = true
+ }
+ var orderOnlys []string
+ for _, d := range node.OrderOnlys {
+ t := escapeBuildTarget(d.Output)
+ if seen[t] {
+ continue
+ }
+ orderOnlys = append(orderOnlys, t)
+ seen[t] = true
+ }
+ return strings.Join(deps, " "), strings.Join(orderOnlys, " ")
+}
+
// escapeNinja doubles every "$" so ninja treats it as a literal dollar
// sign instead of a variable reference.
func escapeNinja(s string) string {
	return strings.Join(strings.Split(s, "$"), "$$")
}
+
// escapeShell backslash-escapes characters that are special inside a
// double-quoted shell string ($, `, !, \, "). A ninja-style "$$" pair is
// emitted as `\$$`: only the first dollar of the pair is escaped.
func escapeShell(s string) string {
	if strings.IndexAny(s, "$`!\\\"") < 0 {
		// fast path: nothing special.
		return s
	}
	var b bytes.Buffer
	prevDollar := false
	for _, r := range s {
		switch {
		case r == '$':
			if prevDollar {
				// second '$' of a "$$" pair: keep it literal.
				b.WriteRune(r)
				prevDollar = false
			} else {
				b.WriteString(`\$`)
				prevDollar = true
			}
		case r == '`' || r == '"' || r == '!' || r == '\\':
			b.WriteByte('\\')
			b.WriteRune(r)
			prevDollar = false
		default:
			b.WriteRune(r)
			prevDollar = false
		}
	}
	return b.String()
}
+
+// ninjaVars substitutes each value in nv (pairs of {ninja variable, value})
+// back into s, so e.g. an expanded output path becomes "${out}" again.
+// Values containing "/./", "/../" or "$" are skipped: ninja would
+// normalize such paths or re-quote the "$". esc, when non-nil, is applied
+// to the value first so it matches the already-escaped command text.
+func (n *NinjaGenerator) ninjaVars(s string, nv [][]string, esc func(string) string) string {
+	for _, v := range nv {
+		k, v := v[0], v[1]
+		if v == "" {
+			continue
+		}
+		if strings.Contains(v, "/./") || strings.Contains(v, "/../") || strings.Contains(v, "$") {
+			// ninja will normalize paths (/./, /../), so keep it as is
+			// ninja will emit quoted string for $
+			continue
+		}
+		if esc != nil {
+			v = esc(v)
+		}
+		s = strings.Replace(s, v, k, -1)
+	}
+	return s
+}
+
+// emitNode writes the ninja rule and build statements for node and then,
+// recursively, for its dependencies. n.done prevents emitting a target
+// twice and records how each output was classified (see nodeState).
+func (n *NinjaGenerator) emitNode(node *DepNode) error {
+	output := node.Output
+	if _, found := n.done[output]; found {
+		return nil
+	}
+	n.done[output] = nodeVisit
+
+	// A leaf with no commands and no deps: classify it instead of emitting
+	// a build statement.
+	if len(node.Cmds) == 0 && len(node.Deps) == 0 && len(node.OrderOnlys) == 0 && !node.IsPhony {
+		if _, ok := n.ctx.vpaths.exists(output); ok {
+			n.done[output] = nodeFile
+			return nil
+		}
+		o := filepath.Clean(output)
+		if o != output {
+			// if normalized target has been done, it marks as alias.
+			if s, found := n.done[o]; found {
+				glog.V(1).Infof("node %s=%s => %s=alias", o, s, node.Output)
+				n.done[output] = nodeAlias
+				return nil
+			}
+		}
+		if node.Filename == "" {
+			n.done[output] = nodeMissing
+		}
+		return nil
+	}
+
+	runners, _, err := createRunners(n.ctx, node)
+	if err != nil {
+		return err
+	}
+	ruleName := "phony"
+	useLocalPool := false
+	inputs, orderOnlys := n.dependency(node)
+	if len(runners) > 0 {
+		ruleName = n.genRuleName()
+		fmt.Fprintf(n.f, "\n# rule for %q\n", node.Output)
+		fmt.Fprintf(n.f, "rule %s\n", ruleName)
+
+		ss, desc, ulp := n.genShellScript(runners)
+		if ulp {
+			useLocalPool = true
+		}
+		fmt.Fprintf(n.f, " description = %s\n", desc)
+		cmdline, depfile, err := getDepfile(ss)
+		if err != nil {
+			return err
+		}
+		if depfile != "" {
+			fmt.Fprintf(n.f, " depfile = %s\n", depfile)
+			fmt.Fprintf(n.f, " deps = gcc\n")
+		}
+		// Map the expanded input/output strings back to ${in}/${out}.
+		nv := [][]string{
+			[]string{"${in}", inputs},
+			[]string{"${out}", escapeNinja(output)},
+		}
+		// It seems Linux is OK with ~130kB.
+		// TODO: Find this number automatically.
+		ArgLenLimit := 100 * 1000
+		if len(cmdline) > ArgLenLimit {
+			// Too long for one argv: put the command into a response file.
+			fmt.Fprintf(n.f, " rspfile = $out.rsp\n")
+			cmdline = n.ninjaVars(cmdline, nv, nil)
+			fmt.Fprintf(n.f, " rspfile_content = %s\n", cmdline)
+			fmt.Fprintf(n.f, " command = %s $out.rsp\n", n.ctx.shell)
+		} else {
+			cmdline = escapeShell(cmdline)
+			cmdline = n.ninjaVars(cmdline, nv, escapeShell)
+			fmt.Fprintf(n.f, " command = %s -c \"%s\"\n", n.ctx.shell, cmdline)
+		}
+	}
+	n.emitBuild(output, ruleName, inputs, orderOnlys)
+	if useLocalPool {
+		fmt.Fprintf(n.f, " pool = local_pool\n")
+	}
+	fmt.Fprintf(n.f, "\n")
+	n.done[output] = nodeBuild
+
+	for _, d := range node.Deps {
+		err := n.emitNode(d)
+		if err != nil {
+			return err
+		}
+		glog.V(1).Infof("node %s dep node %q %s", node.Output, d.Output, n.done[d.Output])
+	}
+	for _, d := range node.OrderOnlys {
+		err := n.emitNode(d)
+		if err != nil {
+			return err
+		}
+		glog.V(1).Infof("node %s order node %q %s", node.Output, d.Output, n.done[d.Output])
+	}
+	return nil
+}
+
+// emitRegenRules emits a generator rule that reruns kati (the original
+// argv in n.Args) to rebuild the ninja file whenever one of the makefiles
+// in MAKEFILE_LIST, or the recorded environment list, changes. It does
+// nothing when the original argv is unknown.
+func (n *NinjaGenerator) emitRegenRules() error {
+	if len(n.Args) == 0 {
+		return nil
+	}
+	mkfiles, err := n.ctx.ev.EvaluateVar("MAKEFILE_LIST")
+	if err != nil {
+		return err
+	}
+	fmt.Fprintf(n.f, `
+rule regen_ninja
+ description = Regenerate ninja files due to dependency
+ generator=1
+ command=%s
+`, strings.Join(n.Args, " "))
+	fmt.Fprintf(n.f, "build %s: regen_ninja %s", n.ninjaName(), mkfiles)
+	// TODO: Add dependencies to directories read by $(wildcard) or
+	// $(shell find).
+	if len(usedEnvs) > 0 {
+		fmt.Fprintf(n.f, " %s", n.envlistName())
+	}
+	fmt.Fprintf(n.f, "\n\n")
+	return nil
+}
+
+func (n *NinjaGenerator) shName() string {
+ return fmt.Sprintf("ninja%s.sh", n.Suffix)
+}
+
+func (n *NinjaGenerator) ninjaName() string {
+ return fmt.Sprintf("build%s.ninja", n.Suffix)
+}
+
+func (n *NinjaGenerator) envlistName() string {
+ return fmt.Sprintf(".kati_env%s", n.Suffix)
+}
+
+// generateEnvlist writes every environment variable read during evaluation
+// (usedEnvs) and its current value, %q-quoted on both sides, to the
+// .kati_env file. The file is a dependency of the regen rule and is read
+// by the generated wrapper script.
+func (n *NinjaGenerator) generateEnvlist() (err error) {
+	f, err := os.Create(n.envlistName())
+	if err != nil {
+		return err
+	}
+	defer func() {
+		// Propagate the Close error if nothing else failed.
+		cerr := f.Close()
+		if err == nil {
+			err = cerr
+		}
+	}()
+	for k := range usedEnvs {
+		v, err := n.ctx.ev.EvaluateVar(k)
+		if err != nil {
+			return err
+		}
+		fmt.Fprintf(f, "%q=%q\n", k, v)
+	}
+	return nil
+}
+
+// generateShell writes the executable (0755) wrapper script ninja<suffix>.sh,
+// which cds to its own directory, re-exports/unsets the recorded
+// environment variables, and execs ninja on the generated file (with
+// -j500 when goma is enabled).
+func (n *NinjaGenerator) generateShell() (err error) {
+	f, err := os.Create(n.shName())
+	if err != nil {
+		return err
+	}
+	defer func() {
+		// Propagate the Close error if nothing else failed.
+		cerr := f.Close()
+		if err == nil {
+			err = cerr
+		}
+	}()
+
+	fmt.Fprintf(f, "#!/bin/bash\n")
+	fmt.Fprintf(f, "# Generated by kati %s\n", gitVersion)
+	fmt.Fprintln(f)
+	fmt.Fprintln(f, `cd $(dirname "$0")`)
+	if n.Suffix != "" {
+		fmt.Fprintf(f, "if [ -f %s ]; then\n export $(cat %s)\nfi\n", n.envlistName(), n.envlistName())
+	}
+	for name, export := range n.exports {
+		// export "a b"=c will error on bash
+		// bash: export `a b=c': not a valid identifier
+		if strings.ContainsAny(name, " \t\n\r") {
+			glog.V(1).Infof("ignore export %q (export:%t)", name, export)
+			continue
+		}
+		if export {
+			v, err := n.ctx.ev.EvaluateVar(name)
+			if err != nil {
+				return err
+			}
+			fmt.Fprintf(f, "export %q=%q\n", name, v)
+		} else {
+			fmt.Fprintf(f, "unset %q\n", name)
+		}
+	}
+	if n.GomaDir == "" {
+		fmt.Fprintf(f, `exec ninja -f %s "$@"`+"\n", n.ninjaName())
+	} else {
+		fmt.Fprintf(f, `exec ninja -f %s -j500 "$@"`+"\n", n.ninjaName())
+	}
+
+	return f.Chmod(0755)
+}
+
+// generateNinja writes the ninja file itself: a header listing the used
+// environment variables, the goma local pool, the regen rule, a build
+// statement per dependency node, phony statements for visited-but-unbuilt
+// targets, and finally a "default" line if defaultTarget was emitted.
+func (n *NinjaGenerator) generateNinja(defaultTarget string) (err error) {
+	f, err := os.Create(n.ninjaName())
+	if err != nil {
+		return err
+	}
+	defer func() {
+		// Propagate the Close error if nothing else failed.
+		cerr := f.Close()
+		if err == nil {
+			err = cerr
+		}
+	}()
+
+	n.f = f
+	fmt.Fprintf(n.f, "# Generated by kati %s\n", gitVersion)
+	fmt.Fprintf(n.f, "\n")
+
+	if len(usedEnvs) > 0 {
+		fmt.Fprintln(n.f, "# Environment variables used:")
+		var names []string
+		for name := range usedEnvs {
+			names = append(names, name)
+		}
+		// Sort for deterministic output (map iteration order is random).
+		sort.Strings(names)
+		for _, name := range names {
+			v, err := n.ctx.ev.EvaluateVar(name)
+			if err != nil {
+				return err
+			}
+			fmt.Fprintf(n.f, "# %q=%q\n", name, v)
+		}
+		fmt.Fprintf(n.f, "\n")
+	}
+
+	if n.GomaDir != "" {
+		fmt.Fprintf(n.f, "pool local_pool\n")
+		fmt.Fprintf(n.f, " depth = %d\n\n", runtime.NumCPU())
+	}
+
+	err = n.emitRegenRules()
+	if err != nil {
+		return err
+	}
+
+	// defining $out for $@ and $in for $^ here doesn't work well,
+	// because these texts will be processed in escapeShell...
+	for _, node := range n.nodes {
+		err := n.emitNode(node)
+		if err != nil {
+			return err
+		}
+		glog.V(1).Infof("node %q %s", node.Output, n.done[node.Output])
+	}
+
+	// emit phony targets for visited nodes that are
+	// - not existing file
+	// - not alias for other targets.
+	var nodes []string
+	for node, state := range n.done {
+		if state != nodeVisit {
+			continue
+		}
+		nodes = append(nodes, node)
+	}
+	if len(nodes) > 0 {
+		fmt.Fprintln(n.f)
+		sort.Strings(nodes)
+		for _, node := range nodes {
+			n.emitBuild(node, "phony", "", "")
+			fmt.Fprintln(n.f)
+			n.done[node] = nodeBuild
+		}
+	}
+
+	// emit default if the target was emitted.
+	if defaultTarget != "" && n.done[defaultTarget] == nodeBuild {
+		fmt.Fprintf(n.f, "\ndefault %s\n", escapeNinja(defaultTarget))
+	}
+	return nil
+}
+
+// Save generates build.ninja from DepGraph.
+// It writes the environment list, the wrapper shell script and the ninja
+// file in that order. When no explicit targets were given, the first node
+// of the graph becomes ninja's default target. name is currently unused.
+func (n *NinjaGenerator) Save(g *DepGraph, name string, targets []string) error {
+	startTime := time.Now()
+	n.init(g)
+	err := n.generateEnvlist()
+	if err != nil {
+		return err
+	}
+	err = n.generateShell()
+	if err != nil {
+		return err
+	}
+	var defaultTarget string
+	if len(targets) == 0 && len(g.nodes) > 0 {
+		defaultTarget = g.nodes[0].Output
+	}
+	err = n.generateNinja(defaultTarget)
+	if err != nil {
+		return err
+	}
+	logStats("generate ninja time: %q", time.Since(startTime))
+	return nil
+}
diff --git a/golang/kati/ninja_test.go b/golang/kati/ninja_test.go
new file mode 100644
index 0000000..48a52f6
--- /dev/null
+++ b/golang/kati/ninja_test.go
@@ -0,0 +1,202 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import "testing"
+
+// TestStripShellComment exercises comment stripping against quoting and
+// command-substitution edge cases, including unbalanced quotes.
+func TestStripShellComment(t *testing.T) {
+	for _, tc := range []struct {
+		in   string
+		want string
+	}{
+		{
+			in:   `foo`,
+			want: `foo`,
+		},
+		{
+			in:   `foo # bar`,
+			want: `foo `,
+		},
+		{
+			in:   `foo '# bar'`,
+			want: `foo '# bar'`,
+		},
+		{
+			in:   `foo '\'# bar'`,
+			want: `foo '\'# bar'`, // unbalanced '
+		},
+		{
+			in:   `foo "# bar"`,
+			want: `foo "# bar"`,
+		},
+		{
+			in:   `foo "\"# bar"`,
+			want: `foo "\"# bar"`,
+		},
+		{
+			in:   `foo "\\"# bar"`,
+			want: `foo "\\"# bar"`, // unbalanced "
+		},
+		{
+			in:   "foo `# bar`",
+			want: "foo `# bar`",
+		},
+		{
+			in:   "foo `\\`# bar`",
+			want: "foo `\\`# bar`", // unbalanced `
+		},
+		{
+			in:   "foo `\\\\`# bar`",
+			want: "foo `\\\\`# bar`",
+		},
+	} {
+		got := stripShellComment(tc.in)
+		if got != tc.want {
+			t.Errorf(`stripShellComment(%q)=%q, want %q`, tc.in, got, tc.want)
+		}
+	}
+}
+
+func TestGetDepFile(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ cmd string
+ depfile string
+ err bool
+ }{
+ {
+ in: `g++ -c fat.cc -o fat.o`,
+ },
+ {
+ in: `g++ -c fat.cc -MD`,
+ err: true,
+ },
+ {
+ in: `g++ -c fat.cc -MD -o fat.o -o fat.o`,
+ err: true,
+ },
+ {
+ in: `g++ -c fat.cc -MD -o fat.o`,
+ cmd: `g++ -c fat.cc -MD -o fat.o && cp fat.d fat.d.tmp`,
+ depfile: `fat.d.tmp`,
+ },
+ {
+ in: `g++ -c fat.cc -MD -o fat`,
+ cmd: `g++ -c fat.cc -MD -o fat && cp fat.d fat.d.tmp`,
+ depfile: `fat.d.tmp`,
+ },
+ {
+ in: `g++ -c fat.cc -MD -MF foo.d -o fat.o`,
+ cmd: `g++ -c fat.cc -MD -MF foo.d -o fat.o && cp foo.d foo.d.tmp`,
+ depfile: `foo.d.tmp`,
+ },
+ {
+ in: `g++ -c fat.cc -MD -o fat.o -MF foo.d`,
+ cmd: `g++ -c fat.cc -MD -o fat.o -MF foo.d && cp foo.d foo.d.tmp`,
+ depfile: `foo.d.tmp`,
+ },
+ // A real example from maloader.
+ {
+ in: `g++ -g -Iinclude -Wall -MMD -fno-omit-frame-pointer -O -m64 -W -Werror -c -o fat.o fat.cc`,
+ cmd: `g++ -g -Iinclude -Wall -MMD -fno-omit-frame-pointer -O -m64 -W -Werror -c -o fat.o fat.cc && cp fat.d fat.d.tmp`,
+ depfile: `fat.d.tmp`,
+ },
+ // A real example from Android.
+ {
+ in: `mkdir -p out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/ && echo "host C++: llvm-rs-cc <= frameworks/compile/slang/llvm-rs-cc.cpp" && prebuilts/clang/linux-x86/host/3.6/bin/clang++ -I external/llvm -I external/llvm/include -I external/llvm/host/include -I external/clang/include -I external/clang/lib/CodeGen -I frameworks/compile/libbcc/include -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates/include -I external/libcxx/include -I frameworks/compile/slang -I out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates -I libnativehelper/include/nativehelper $(cat out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem tools/include -isystem out/host/linux-x86/obj/include -c -fno-exceptions -Wno-multichar -m64 -Wa,--noexecstack -fPIC -no-canonical-prefixes -include build/core/combo/include/arch/linux-x86/AndroidConfig.h -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -O2 -g -fno-strict-aliasing -DNDEBUG -UDEBUG -D__compiler_offsetof=__builtin_offsetof -Werror=int-conversion -Wno-unused-command-line-argument --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -target x86_64-linux-gnu -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Wsign-promo -std=gnu++11 -DNDEBUG -UDEBUG -Wno-inconsistent-missing-override 
--gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8 -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/x86_64-linux -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/backward -target x86_64-linux-gnu -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -D_GNU_SOURCE -D__STDC_LIMIT_MACROS -O2 -fomit-frame-pointer -Wall -W -Wno-unused-parameter -Wwrite-strings -Dsprintf=sprintf -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -fno-exceptions -fpie -D_USING_LIBCXX -Wno-sign-promo -fno-rtti -Woverloaded-virtual -Wno-sign-promo -std=c++11 -nostdinc++ -MD -MF out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d -o out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.o frameworks/compile/slang/llvm-rs-cc.cpp && cp out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$//' -e '/^$/ d' -e 's/$/ :/' < out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d >> out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; rm -f out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d`,
+ cmd: `mkdir -p out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/ && echo "host C++: llvm-rs-cc <= frameworks/compile/slang/llvm-rs-cc.cpp" && prebuilts/clang/linux-x86/host/3.6/bin/clang++ -I external/llvm -I external/llvm/include -I external/llvm/host/include -I external/clang/include -I external/clang/lib/CodeGen -I frameworks/compile/libbcc/include -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates/include -I external/libcxx/include -I frameworks/compile/slang -I out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates -I out/host/linux-x86/gen/EXECUTABLES/llvm-rs-cc_intermediates -I libnativehelper/include/nativehelper $(cat out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem tools/include -isystem out/host/linux-x86/obj/include -c -fno-exceptions -Wno-multichar -m64 -Wa,--noexecstack -fPIC -no-canonical-prefixes -include build/core/combo/include/arch/linux-x86/AndroidConfig.h -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -O2 -g -fno-strict-aliasing -DNDEBUG -UDEBUG -D__compiler_offsetof=__builtin_offsetof -Werror=int-conversion -Wno-unused-command-line-argument --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -target x86_64-linux-gnu -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Wsign-promo -std=gnu++11 -DNDEBUG -UDEBUG -Wno-inconsistent-missing-override 
--gcc-toolchain=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/ --sysroot=prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//sysroot -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8 -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/x86_64-linux -isystem prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8//x86_64-linux/include/c++/4.8/backward -target x86_64-linux-gnu -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -D_GNU_SOURCE -D__STDC_LIMIT_MACROS -O2 -fomit-frame-pointer -Wall -W -Wno-unused-parameter -Wwrite-strings -Dsprintf=sprintf -pedantic -Wcast-qual -Wno-long-long -Wno-sign-promo -Wall -Wno-unused-parameter -Wno-return-type -Werror -std=c++11 -O0 -DTARGET_BUILD_VARIANT=eng -DRS_VERSION=23 -fno-exceptions -fpie -D_USING_LIBCXX -Wno-sign-promo -fno-rtti -Woverloaded-virtual -Wno-sign-promo -std=c++11 -nostdinc++ -MD -MF out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d -o out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.o frameworks/compile/slang/llvm-rs-cc.cpp && cp out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$//' -e '/^$/ d' -e 's/$/ :/' < out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.d >> out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P`,
+ depfile: `out/host/linux-x86/obj/EXECUTABLES/llvm-rs-cc_intermediates/llvm-rs-cc.P`,
+ },
+ {
+ in: `echo "target asm: libsonivox <= external/sonivox/arm-wt-22k/lib_src/ARM-E_filter_gnu.s" && mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ && prebuilts/gcc/linux-x86/arm/arm-linux-androideabi-4.9/bin/arm-linux-androideabi-gcc -I external/sonivox/arm-wt-22k/host_src -I external/sonivox/arm-wt-22k/lib_src -I external/libcxx/include -I external/sonivox/arm-wt-22k -I out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates -I out/target/product/generic/gen/SHARED_LIBRARIES/libsonivox_intermediates -I libnativehelper/include/nativehelper $$(cat out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/import_includes) -isystem system/core/include -isystem hardware/libhardware/include -isystem hardware/libhardware_legacy/include -isystem hardware/ril/include -isystem libnativehelper/include -isystem frameworks/native/include -isystem frameworks/native/opengl/include -isystem frameworks/av/include -isystem frameworks/base/include -isystem out/target/product/generic/obj/include -isystem bionic/libc/arch-arm/include -isystem bionic/libc/include -isystem bionic/libc/kernel/uapi -isystem bionic/libc/kernel/uapi/asm-arm -isystem bionic/libm/include -isystem bionic/libm/include/arm -c -fno-exceptions -Wno-multichar -msoft-float -ffunction-sections -fdata-sections -funwind-tables -fstack-protector -Wa,--noexecstack -Werror=format-security -D_FORTIFY_SOURCE=2 -fno-short-enums -no-canonical-prefixes -fno-canonical-system-headers -march=armv7-a -mfloat-abi=softfp -mfpu=vfpv3-d16 -include build/core/combo/include/arch/linux-arm/AndroidConfig.h -I build/core/combo/include/arch/linux-arm/ -fno-builtin-sin -fno-strict-volatile-bitfields -Wno-psabi -mthumb-interwork -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point -DNDEBUG -g -Wstrict-aliasing=2 -fgcse-after-reload -frerun-cse-after-loop 
-frename-registers -DNDEBUG -UDEBUG -Wa,"-I" -Wa,"external/sonivox/arm-wt-22k/lib_src" -Wa,"--defsym" -Wa,"SAMPLE_RATE_22050=1" -Wa,"--defsym" -Wa,"STEREO_OUTPUT=1" -Wa,"--defsym" -Wa,"FILTER_ENABLED=1" -Wa,"--defsym" -Wa,"SAMPLES_8_BIT=1" -D__ASSEMBLY__ -MD -MF out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ARM-E_filter_gnu.d -o out/target/product/generic/obj/SHARED_LIBRARIES/libsonivox_intermediates/lib_src/ARM-E_filter_gnu.o external/sonivox/arm-wt-22k/lib_src/ARM-E_filter_gnu.s`,
+ depfile: ``,
+ },
+ {
+ in: `echo "RenderScript: Galaxy4 <= packages/wallpapers/Galaxy4/src/com/android/galaxy4/galaxy.rs" && rm -rf out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/res/raw && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/src && out/host/linux-x86/bin/llvm-rs-cc -o out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/res/raw -p out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript/src -d out/target/common/obj/APPS/Galaxy4_intermediates/src/renderscript -a out/target/common/obj/APPS/Galaxy4_intermediates/src/RenderScript.stamp -MD -target-api 14 -Wall -Werror -I prebuilts/sdk/renderscript/clang-include -I prebuilts/sdk/renderscript/include packages/wallpapers/Galaxy4/src/com/android/galaxy4/galaxy.rs && mkdir -p out/target/common/obj/APPS/Galaxy4_intermediates/src/ && touch out/target/common/obj/APPS/Galaxy4_intermediates/src/RenderScript.stamp`,
+ depfile: ``,
+ },
+ {
+ in: `(echo "bc: libclcore.bc <= frameworks/rs/driver/runtime/arch/generic.c") && (mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/) && (prebuilts/clang/linux-x86/host/3.6/bin/clang -Iframeworks/rs/scriptc -Iexternal/clang/lib/Headers -MD -DRS_VERSION=23 -std=c99 -c -O3 -fno-builtin -emit-llvm -target armv7-none-linux-gnueabi -fsigned-char -Iframeworks/rs/cpu_ref -DRS_DECLARE_EXPIRED_APIS -Xclang -target-feature -Xclang +long64 frameworks/rs/driver/runtime/arch/generic.c -o out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.bc) && (cp out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' -e '/^$$/ d' -e 's/$$/ :/' < out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d >> out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; rm -f out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d)`,
+ cmd: `(echo "bc: libclcore.bc <= frameworks/rs/driver/runtime/arch/generic.c") && (mkdir -p out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/) && (prebuilts/clang/linux-x86/host/3.6/bin/clang -Iframeworks/rs/scriptc -Iexternal/clang/lib/Headers -MD -DRS_VERSION=23 -std=c99 -c -O3 -fno-builtin -emit-llvm -target armv7-none-linux-gnueabi -fsigned-char -Iframeworks/rs/cpu_ref -DRS_DECLARE_EXPIRED_APIS -Xclang -target-feature -Xclang +long64 frameworks/rs/driver/runtime/arch/generic.c -o out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.bc) && (cp out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P; sed -e 's/#.*//' -e 's/^[^:]*: *//' -e 's/ *\\$$//' -e '/^$$/ d' -e 's/$$/ :/' < out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.d >> out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P)`,
+ depfile: `out/target/product/generic/obj/SHARED_LIBRARIES/libclcore.bc_intermediates/arch/generic.P`,
+ },
+ {
+ in: `gcc -c foo.P.c`,
+ depfile: ``,
+ },
+ {
+ in: `(/bin/sh ./libtool --tag=CXX --mode=compile g++ -DHAVE_CONFIG_H -I. -I./src -I./src -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -DNDEBUG -g -O2 -MT libglog_la-logging.lo -MD -MP -MF .deps/libglog_la-logging.Tpo -c -o libglog_la-logging.lo ` + "`" + `test -f 'src/logging.cc' || echo './'` + "`" + `src/logging.cc) && (mv -f .deps/libglog_la-logging.Tpo .deps/libglog_la-logging.Plo)`,
+ cmd: `(/bin/sh ./libtool --tag=CXX --mode=compile g++ -DHAVE_CONFIG_H -I. -I./src -I./src -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -DNDEBUG -g -O2 -MT libglog_la-logging.lo -MD -MP -MF .deps/libglog_la-logging.Tpo -c -o libglog_la-logging.lo ` + "`" + `test -f 'src/logging.cc' || echo './'` + "`" + `src/logging.cc) && (cp -f .deps/libglog_la-logging.Tpo .deps/libglog_la-logging.Plo)`,
+ depfile: `.deps/libglog_la-logging.Tpo`,
+ },
+ {
+ in: `(g++ -DHAVE_CONFIG_H -I. -I./src -I./src -pthread -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -g -O2 -MT signalhandler_unittest-signalhandler_unittest.o -MD -MP -MF .deps/signalhandler_unittest-signalhandler_unittest.Tpo -c -o signalhandler_unittest-signalhandler_unittest.o ` + "`" + `test -f 'src/signalhandler_unittest.cc' || echo './'` + "`" + `src/signalhandler_unittest.cc) && (mv -f .deps/signalhandler_unittest-signalhandler_unittest.Tpo .deps/signalhandler_unittest-signalhandler_unittest.Po)`,
+ cmd: `(g++ -DHAVE_CONFIG_H -I. -I./src -I./src -pthread -Wall -Wwrite-strings -Woverloaded-virtual -Wno-sign-compare -DNO_FRAME_POINTER -g -O2 -MT signalhandler_unittest-signalhandler_unittest.o -MD -MP -MF .deps/signalhandler_unittest-signalhandler_unittest.Tpo -c -o signalhandler_unittest-signalhandler_unittest.o ` + "`" + `test -f 'src/signalhandler_unittest.cc' || echo './'` + "`" + `src/signalhandler_unittest.cc) && (cp -f .deps/signalhandler_unittest-signalhandler_unittest.Tpo .deps/signalhandler_unittest-signalhandler_unittest.Po)`,
+ depfile: `.deps/signalhandler_unittest-signalhandler_unittest.Tpo`,
+ },
+ } {
+ cmd, depfile, err := getDepfile(tc.in)
+ if tc.err && err == nil {
+ t.Errorf(`getDepfile(%q) unexpectedly has no error`, tc.in)
+ } else if !tc.err && err != nil {
+ t.Errorf(`getDepfile(%q) has an error: %q`, tc.in, err)
+ }
+
+ wantcmd := tc.cmd
+ if wantcmd == "" {
+ wantcmd = tc.in
+ }
+ if got, want := cmd, wantcmd; got != want {
+ t.Errorf("getDepfile(%q)=\n %q, _, _;\nwant=%q, _, _", tc.in, got, want)
+ }
+ if got, want := depfile, tc.depfile; got != want {
+ t.Errorf(`getDepfile(%q)=_, %q, _; want=_, %q, _`, tc.in, got, want)
+ }
+ }
+}
+
+func TestGomaCmdForAndroidCompileCmd(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ want string
+ ok bool
+ }{
+ {
+ in: "prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c ",
+ ok: true,
+ },
+ {
+ in: "prebuilts/misc/linux-x86/ccache/ccache prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c ",
+ want: "prebuilts/clang/linux-x86/host/3.6/bin/clang++ -c foo.c ",
+ ok: true,
+ },
+ {
+ in: "echo foo ",
+ ok: false,
+ },
+ } {
+ got, ok := gomaCmdForAndroidCompileCmd(tc.in)
+ want := tc.want
+ if tc.want == "" {
+ want = tc.in
+ }
+ if got != want {
+ t.Errorf("gomaCmdForAndroidCompileCmd(%q)=%q, _; want=%q, _", tc.in, got, tc.want)
+ }
+ if ok != tc.ok {
+ t.Errorf("gomaCmdForAndroidCompileCmd(%q)=_, %t; want=_, %t", tc.in, ok, tc.ok)
+ }
+ }
+}
diff --git a/golang/kati/parser.go b/golang/kati/parser.go
new file mode 100644
index 0000000..aef20e2
--- /dev/null
+++ b/golang/kati/parser.go
@@ -0,0 +1,821 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+//go:generate go run testcase/gen_testcase_parse_benchmark.go
+//
+// $ go generate
+// $ go test -bench .
+
+import (
+ "bufio"
+ "bytes"
+ "crypto/sha1"
+ "errors"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "strings"
+ "sync"
+ "time"
+
+ "github.com/golang/glog"
+)
+
// makefile is the parsed form of one makefile: its path and the
// top-level statements in source order.
type makefile struct {
	filename string
	stmts    []ast
}

// ifState tracks one open conditional (ifdef/ifeq/...) while parsing.
type ifState struct {
	ast    *ifAST
	inElse bool // true once the "else" branch is being filled
	// numNest counts extra conditionals opened via "else ifeq ..."
	// chains that share this conditional's endif (see parseEndif).
	numNest int
}

// parser turns a makefile byte stream into a makefile AST.
type parser struct {
	rd          *bufio.Reader
	mk          makefile
	lineno      int
	elineno     int // lineno == elineno unless there is trailing '\'.
	linenoFixed bool // line numbers pinned to a caller-supplied srcpos (embedded text)
	done        bool // EOF or read error reached
	outStmts    *[]ast // destination for new statements; may point into an ifAST branch
	inRecipe    bool // previous statement was a rule; tab lines are commands
	ifStack     []ifState

	defineVar []byte // name from an open "define" directive; nil when not in one
	inDef     []byte // accumulated body lines of the open "define"

	defOpt    string // "override" or "export" to attach to the next assignment
	numIfNest int
	err       error
}
+
+func newParser(rd io.Reader, filename string) *parser {
+ p := &parser{
+ rd: bufio.NewReader(rd),
+ }
+ p.mk.filename = filename
+ p.outStmts = &p.mk.stmts
+ return p
+}
+
+func (p *parser) srcpos() srcpos {
+ return srcpos{
+ filename: p.mk.filename,
+ lineno: p.lineno,
+ }
+}
+
// addStatement appends stmt to the current output list and updates
// recipe state: after a rule line, tab-indented lines are commands;
// an assignment, include or export ends the recipe.
func (p *parser) addStatement(stmt ast) {
	*p.outStmts = append(*p.outStmts, stmt)
	switch stmt.(type) {
	case *maybeRuleAST:
		p.inRecipe = true
	case *assignAST, *includeAST, *exportAST:
		p.inRecipe = false
	}
}

// readLine reads one logical line, joining physical lines whose tail
// ends in an odd number of backslashes (i.e. a '\' escaping the
// newline).  Embedded "\\\n" sequences are kept (concatline collapses
// them later); only the final line terminator is trimmed.  Read
// errors are recorded in p.err and end the parse.
func (p *parser) readLine() []byte {
	if !p.linenoFixed {
		p.lineno = p.elineno + 1
	}
	var line []byte
	for !p.done {
		buf, err := p.rd.ReadBytes('\n')
		if !p.linenoFixed {
			p.elineno++
		}
		if err == io.EOF {
			p.done = true
		} else if err != nil {
			p.err = fmt.Errorf("readline %s: %v", p.srcpos(), err)
			p.done = true
		}
		line = append(line, buf...)
		buf = bytes.TrimRight(buf, "\r\n")
		glog.V(4).Infof("buf:%q", buf)
		// An odd number of trailing backslashes means the last one
		// escapes the newline, so the next physical line continues
		// this logical line.
		backslash := false
		for len(buf) > 0 && buf[len(buf)-1] == '\\' {
			buf = buf[:len(buf)-1]
			backslash = !backslash
		}
		if !backslash {
			glog.V(4).Infof("no concat line:%q", buf)
			break
		}
	}
	line = bytes.TrimRight(line, "\r\n")
	return line
}
+
+func newAssignAST(p *parser, lhsBytes []byte, rhsBytes []byte, op string) (*assignAST, error) {
+ lhs, _, err := parseExpr(lhsBytes, nil, parseOp{alloc: true})
+ if err != nil {
+ return nil, err
+ }
+ rhs, _, err := parseExpr(rhsBytes, nil, parseOp{alloc: true})
+ if err != nil {
+ return nil, err
+ }
+ opt := ""
+ if p != nil {
+ opt = p.defOpt
+ }
+ return &assignAST{
+ lhs: lhs,
+ rhs: rhs,
+ op: op,
+ opt: opt,
+ }, nil
+}
+
+func (p *parser) handleDirective(line []byte, directives map[string]directiveFunc) bool {
+ w, data := firstWord(line)
+ if d, ok := directives[string(w)]; ok {
+ d(p, data)
+ return true
+ }
+ return false
+}
+
// handleRuleOrAssign parses a non-directive line.  Text after the
// first literal ';' (the start of an inline recipe) is split off so
// it is not parsed as part of the rule, then the line is tried as an
// assignment and otherwise as a rule.
func (p *parser) handleRuleOrAssign(line []byte) {
	rline := line
	var semi []byte
	if i := findLiteralChar(line, ';', 0, skipVar); i >= 0 {
		// preserve after semicolon
		semi = append(semi, line[i+1:]...)
		rline = concatline(line[:i])
	} else {
		rline = concatline(line)
	}
	// The assignment check sees the full original line; handleAssign
	// does its own concatline/comment stripping.
	if p.handleAssign(line) {
		return
	}
	// not assignment.
	// ie. no '=' found or ':' found before '=' (except ':=')
	p.parseMaybeRule(rline, semi)
	return
}

// handleAssign tries to parse line as a variable assignment.  It
// reports true (and emits an assignAST) when a literal '=' appears
// before any rule ':' — ":=" still counts as an assignment.
func (p *parser) handleAssign(line []byte) bool {
	aline, _ := removeComment(concatline(line))
	aline = trimLeftSpaceBytes(aline)
	if len(aline) == 0 {
		return false
	}
	// fmt.Printf("assign: %q=>%q\n", line, aline)
	// findLiteralChar returns the first unescaped ':' or '=' outside
	// variable references.
	i := findLiteralChar(aline, ':', '=', skipVar)
	if i >= 0 {
		if aline[i] == '=' {
			p.parseAssign(aline, i)
			return true
		}
		if aline[i] == ':' && i+1 < len(aline) && aline[i+1] == '=' {
			p.parseAssign(aline, i+1)
			return true
		}
	}
	return false
}

// parseAssign emits an assignAST for line, where sep indexes the '='
// byte.  A preceding ':', '+' or '?' widens the operator to ":=",
// "+=" or "?=".
func (p *parser) parseAssign(line []byte, sep int) {
	lhs, op, rhs := line[:sep], line[sep:sep+1], line[sep+1:]
	if sep > 0 {
		switch line[sep-1] {
		case ':', '+', '?':
			lhs, op = line[:sep-1], line[sep-1:sep+1]
		}
	}
	glog.V(1).Infof("parseAssign %s op:%q opt:%s", line, op, p.defOpt)
	lhs = trimSpaceBytes(lhs)
	rhs = trimLeftSpaceBytes(rhs)
	aast, err := newAssignAST(p, lhs, rhs, string(op))
	if err != nil {
		p.err = err
		return
	}
	aast.srcpos = p.srcpos()
	p.addStatement(aast)
}
+
// parseMaybeRule emits a maybeRuleAST for line (targets and
// prerequisites, possibly with a target-specific variable assignment
// after the ':'); semi holds any inline recipe text captured after
// ';'.  Whether the line is really a rule is decided at eval time.
func (p *parser) parseMaybeRule(line, semi []byte) {
	if len(line) == 0 {
		p.err = p.srcpos().errorf("*** missing rule before commands.")
		return
	}
	if line[0] == '\t' {
		p.err = p.srcpos().errorf("*** commands commence before first target.")
		return
	}
	var assign *assignAST
	ci := findLiteralChar(line, ':', 0, skipVar)
	if ci >= 0 {
		// "target: VAR=value" is a target-specific variable.
		eqi := findLiteralChar(line[ci+1:], '=', 0, skipVar)
		if eqi == 0 {
			panic(fmt.Sprintf("unexpected eq after colon: %q", line))
		}
		if eqi > 0 {
			var lhsbytes []byte
			op := "="
			// A ':', '+' or '?' just before '=' widens the operator.
			switch line[ci+1+eqi-1] {
			case ':', '+', '?':
				lhsbytes = append(lhsbytes, line[ci+1:ci+1+eqi-1]...)
				op = string(line[ci+1+eqi-1 : ci+1+eqi+1])
			default:
				lhsbytes = append(lhsbytes, line[ci+1:ci+1+eqi]...)
			}

			lhsbytes = trimSpaceBytes(lhsbytes)
			lhs, _, err := parseExpr(lhsbytes, nil, parseOp{})
			if err != nil {
				p.err = p.srcpos().error(err)
				return
			}
			var rhsbytes []byte
			rhsbytes = append(rhsbytes, line[ci+1+eqi+1:]...)
			if semi != nil {
				// In an assignment the ';' suffix is part of the
				// value, not a recipe.
				rhsbytes = append(rhsbytes, ';')
				rhsbytes = append(rhsbytes, concatline(semi)...)
			}
			rhsbytes = trimLeftSpaceBytes(rhsbytes)
			semi = nil
			rhs, _, err := parseExpr(rhsbytes, nil, parseOp{})
			if err != nil {
				p.err = p.srcpos().error(err)
				return
			}

			// TODO(ukai): support override, export in target specific var.
			assign = &assignAST{
				lhs: lhs,
				rhs: rhs,
				op:  op,
			}
			assign.srcpos = p.srcpos()
			// Keep only "target:" for the rule expression.
			line = line[:ci+1]
		}
	}
	expr, _, err := parseExpr(line, nil, parseOp{})
	if err != nil {
		p.err = p.srcpos().error(err)
		return
	}
	// TODO(ukai): remove ast, and eval here.
	rast := &maybeRuleAST{
		isRule: ci >= 0,
		expr:   expr,
		assign: assign,
		semi:   semi,
	}
	rast.srcpos = p.srcpos()
	glog.V(1).Infof("stmt: %#v", rast)
	p.addStatement(rast)
}
+
+func (p *parser) parseInclude(op string, line []byte) {
+ // TODO(ukai): parse expr here
+ iast := &includeAST{
+ expr: string(line),
+ op: op,
+ }
+ iast.srcpos = p.srcpos()
+ p.addStatement(iast)
+}
+
+func (p *parser) parseIfdef(op string, data []byte) {
+ lhs, _, err := parseExpr(data, nil, parseOp{alloc: true})
+ if err != nil {
+ p.err = p.srcpos().error(err)
+ return
+ }
+ iast := &ifAST{
+ op: op,
+ lhs: lhs,
+ }
+ iast.srcpos = p.srcpos()
+ p.addStatement(iast)
+ p.ifStack = append(p.ifStack, ifState{ast: iast, numNest: p.numIfNest})
+ p.outStmts = &iast.trueStmts
+}
+
+func (p *parser) parseTwoQuotes(s []byte) (string, string, []byte, bool) {
+ var args []string
+ for i := 0; i < 2; i++ {
+ s = trimSpaceBytes(s)
+ if len(s) == 0 {
+ return "", "", nil, false
+ }
+ quote := s[0]
+ if quote != '\'' && quote != '"' {
+ return "", "", nil, false
+ }
+ end := bytes.IndexByte(s[1:], quote) + 1
+ if end < 0 {
+ return "", "", nil, false
+ }
+ args = append(args, string(s[1:end]))
+ s = s[end+1:]
+ }
+ return args[0], args[1], s, true
+}
+
// parseEq parses the two arguments of ifeq/ifneq in either form:
//
//	"(lhs, rhs)"
//	"lhs, rhs"   (each side quoted with ' or ")
//
// It returns the raw lhs and rhs text, any trailing bytes after the
// arguments, and whether parsing succeeded.
func (p *parser) parseEq(s []byte) (string, string, []byte, bool) {
	if len(s) == 0 {
		return "", "", nil, false
	}
	if s[0] == '(' {
		in := s[1:]
		glog.V(1).Infof("parseEq ( %q )", in)
		term := []byte{','}
		v, n, err := parseExpr(in, term, parseOp{matchParen: true})
		if err != nil {
			glog.V(1).Infof("parse eq: %q: %v", in, err)
			return "", "", nil, false
		}
		lhs := v.String()
		n++ // step over the ','
		n += skipSpaces(in[n:], nil)
		term = []byte{')'}
		in = in[n:]
		v, n, err = parseExpr(in, term, parseOp{matchParen: true})
		if err != nil {
			glog.V(1).Infof("parse eq 2nd: %q: %v", in, err)
			return "", "", nil, false
		}
		rhs := v.String()
		in = in[n+1:] // step over the ')'
		in = trimSpaceBytes(in)
		return lhs, rhs, in, true
	}
	return p.parseTwoQuotes(s)
}

// parseIfeq handles "ifeq"/"ifneq": both sides are parsed as
// expressions, an ifAST is pushed, and statements are routed into its
// true branch until a matching else/endif.  Extra text after the
// arguments only warns, matching GNU make.
func (p *parser) parseIfeq(op string, data []byte) {
	lhsBytes, rhsBytes, extra, ok := p.parseEq(data)
	if !ok {
		p.err = p.srcpos().errorf(`*** invalid syntax in conditional.`)
		return
	}
	if len(extra) > 0 {
		glog.V(1).Infof("extra %q", extra)
		warnNoPrefix(p.srcpos(), `extraneous text after %q directive`, op)
	}

	lhs, _, err := parseExpr([]byte(lhsBytes), nil, parseOp{matchParen: true})
	if err != nil {
		p.err = p.srcpos().error(err)
		return
	}
	rhs, _, err := parseExpr([]byte(rhsBytes), nil, parseOp{matchParen: true})
	if err != nil {
		p.err = p.srcpos().error(err)
		return
	}

	iast := &ifAST{
		op:  op,
		lhs: lhs,
		rhs: rhs,
	}
	iast.srcpos = p.srcpos()
	p.addStatement(iast)
	p.ifStack = append(p.ifStack, ifState{ast: iast, numNest: p.numIfNest})
	p.outStmts = &iast.trueStmts
}
+
+func (p *parser) checkIfStack(curKeyword string) error {
+ if len(p.ifStack) == 0 {
+ return p.srcpos().errorf(`*** extraneous %q.`, curKeyword)
+ }
+ return nil
+}
+
// parseElse switches the innermost conditional to its else branch.
// An "else ifdef/ifeq ..." on the same line re-dispatches to the
// conditional directives with numIfNest bumped, so the shared endif
// later pops both frames.
func (p *parser) parseElse(data []byte) {
	err := p.checkIfStack("else")
	if err != nil {
		p.err = err
		return
	}
	state := &p.ifStack[len(p.ifStack)-1]
	if state.inElse {
		p.err = p.srcpos().errorf(`*** only one "else" per conditional.`)
		return
	}
	state.inElse = true
	p.outStmts = &state.ast.falseStmts

	nextIf := data
	if len(nextIf) == 0 {
		return
	}
	var ifDirectives = map[string]directiveFunc{
		"ifdef":  ifdefDirective,
		"ifndef": ifndefDirective,
		"ifeq":   ifeqDirective,
		"ifneq":  ifneqDirective,
	}
	// A chained conditional shares this one's endif; record the extra
	// nesting depth so parseEndif unwinds it in one go.
	p.numIfNest = state.numNest + 1
	if p.handleDirective(nextIf, ifDirectives) {
		p.numIfNest = 0
		return
	}
	p.numIfNest = 0
	warnNoPrefix(p.srcpos(), "extraneous text after `else' directive")
	return
}

// parseEndif closes the innermost conditional, popping numNest+1
// frames to also close conditionals opened by "else if..." chains,
// then restores the statement output list of the enclosing branch (or
// the top level).
func (p *parser) parseEndif(data []byte) {
	err := p.checkIfStack("endif")
	if err != nil {
		p.err = err
		return
	}
	state := p.ifStack[len(p.ifStack)-1]
	for t := 0; t <= state.numNest; t++ {
		p.ifStack = p.ifStack[0 : len(p.ifStack)-1]
		if len(p.ifStack) == 0 {
			p.outStmts = &p.mk.stmts
		} else {
			state := p.ifStack[len(p.ifStack)-1]
			if state.inElse {
				p.outStmts = &state.ast.falseStmts
			} else {
				p.outStmts = &state.ast.trueStmts
			}
		}
	}
	if len(trimSpaceBytes(data)) > 0 {
		warnNoPrefix(p.srcpos(), "extraneous text after `endif' directive")
	}
	return
}
+
+func (p *parser) parseDefine(data []byte) {
+ p.defineVar = nil
+ p.inDef = nil
+ p.defineVar = append(p.defineVar, trimSpaceBytes(data)...)
+ return
+}
+
+func (p *parser) parseVpath(data []byte) {
+ vline, _ := removeComment(concatline(data))
+ vline = trimLeftSpaceBytes(vline)
+ v, _, err := parseExpr(vline, nil, parseOp{})
+ if err != nil {
+ p.err = p.srcpos().errorf("parse error %q: %v", string(vline), err)
+ return
+ }
+ vast := &vpathAST{
+ expr: v,
+ }
+ vast.srcpos = p.srcpos()
+ p.addStatement(vast)
+}
+
// directiveFunc handles one makefile directive; data is the text
// after the directive keyword.
type directiveFunc func(*parser, []byte)

// makeDirectives maps directive keywords to their handlers.
var makeDirectives map[string]directiveFunc

// NOTE(review): the map is filled in init rather than in the var
// declaration, presumably to avoid an initialization cycle between
// the map and the handler functions — confirm before converting to a
// composite-literal var.
func init() {
	makeDirectives = map[string]directiveFunc{
		"include":  includeDirective,
		"-include": sincludeDirective,
		"sinclude": sincludeDirective,
		"ifdef":    ifdefDirective,
		"ifndef":   ifndefDirective,
		"ifeq":     ifeqDirective,
		"ifneq":    ifneqDirective,
		"else":     elseDirective,
		"endif":    endifDirective,
		"define":   defineDirective,
		"override": overrideDirective,
		"export":   exportDirective,
		"unexport": unexportDirective,
		"vpath":    vpathDirective,
	}
}
+
// includeDirective handles "include".
func includeDirective(p *parser, data []byte) {
	p.parseInclude("include", data)
}

// sincludeDirective handles "-include"/"sinclude" (include that
// ignores a missing file).
func sincludeDirective(p *parser, data []byte) {
	p.parseInclude("-include", data)
}

// ifdefDirective handles "ifdef".
func ifdefDirective(p *parser, data []byte) {
	p.parseIfdef("ifdef", data)
}

// ifndefDirective handles "ifndef".
func ifndefDirective(p *parser, data []byte) {
	p.parseIfdef("ifndef", data)
}

// ifeqDirective handles "ifeq".
func ifeqDirective(p *parser, data []byte) {
	p.parseIfeq("ifeq", data)
}

// ifneqDirective handles "ifneq".
func ifneqDirective(p *parser, data []byte) {
	p.parseIfeq("ifneq", data)
}

// elseDirective handles "else".
func elseDirective(p *parser, data []byte) {
	p.parseElse(data)
}

// endifDirective handles "endif".
func endifDirective(p *parser, data []byte) {
	p.parseEndif(data)
}

// defineDirective handles "define".
func defineDirective(p *parser, data []byte) {
	p.parseDefine(data)
}
+
// overrideDirective handles "override": first as "override define
// NAME", then as "override VAR = value", and otherwise re-parses the
// whole line (with the "override " prefix restored) as a rule or
// assignment.
func overrideDirective(p *parser, data []byte) {
	p.defOpt = "override"
	defineDirective := map[string]directiveFunc{
		"define": defineDirective,
	}
	glog.V(1).Infof("override define? %q", data)
	if p.handleDirective(data, defineDirective) {
		return
	}
	// e.g. override foo := bar
	// line will be "foo := bar".
	if p.handleAssign(data) {
		return
	}
	p.defOpt = ""
	var line []byte
	line = append(line, []byte("override ")...)
	line = append(line, data...)
	p.handleRuleOrAssign(line)
	// TODO(ukai): evaluate now to detect invalid "override" directive here?
}
+
// handleExport emits an exportAST for an export/unexport line.  When
// the line also contains an assignment ("export FOO=bar"), only the
// name part before the operator is exported and hasEqual is returned
// true so the caller can process the assignment as well.
func handleExport(p *parser, data []byte, export bool) (hasEqual bool) {
	i := bytes.IndexByte(data, '=')
	if i > 0 {
		hasEqual = true
		// Strip a ':' '+' '?' that belongs to the operator (":=" etc).
		switch data[i-1] {
		case ':', '+', '?':
			i--
		}
		data = data[:i]
	}
	east := &exportAST{
		expr:     data,
		hasEqual: hasEqual,
		export:   export,
	}
	east.srcpos = p.srcpos()
	glog.V(1).Infof("export %v", east)
	p.addStatement(east)
	return hasEqual
}

// exportDirective handles "export": "export define NAME", a plain
// "export VAR ...", or "export VAR = value" (export plus assignment).
func exportDirective(p *parser, data []byte) {
	p.defOpt = "export"
	defineDirective := map[string]directiveFunc{
		"define": defineDirective,
	}
	glog.V(1).Infof("export define? %q", data)
	if p.handleDirective(data, defineDirective) {
		return
	}

	if !handleExport(p, data, true) {
		return
	}

	// e.g. export foo := bar
	// line will be "foo := bar".
	p.handleAssign(data)
}

// unexportDirective handles "unexport".
func unexportDirective(p *parser, data []byte) {
	handleExport(p, data, false)
	return
}

// vpathDirective handles "vpath".
func vpathDirective(p *parser, data []byte) {
	p.parseVpath(data)
}
+
// parse drives the parser: it reads logical lines until EOF, routing
// each to define-body accumulation, recipe command collection
// (tab-indented lines after a rule), or general line parsing.  It
// returns the finished makefile or the first error encountered.
func (p *parser) parse() (mk makefile, err error) {
	for !p.done {
		line := p.readLine()
		if glog.V(1) {
			glog.Infof("%s: %q", p.srcpos(), line)
		}
		if p.defineVar != nil {
			// Inside "define NAME" ... "endef".
			p.processDefine(line)
			if p.err != nil {
				return makefile{}, p.err
			}
			continue
		}
		p.defOpt = ""
		if p.inRecipe {
			// Tab-indented lines following a rule are its commands.
			if len(line) > 0 && line[0] == '\t' {
				cast := &commandAST{cmd: string(line[1:])}
				cast.srcpos = p.srcpos()
				p.addStatement(cast)
				continue
			}
		}
		p.parseLine(line)
		if p.err != nil {
			return makefile{}, p.err
		}
	}
	return p.mk, p.err
}
+
+func (p *parser) parseLine(line []byte) {
+ cline := concatline(line)
+ if len(cline) == 0 {
+ return
+ }
+ if glog.V(1) {
+ glog.Infof("concatline:%q", cline)
+ }
+ var dline []byte
+ cline, _ = removeComment(cline)
+ dline = append(dline, cline...)
+ dline = trimSpaceBytes(dline)
+ if len(dline) == 0 {
+ return
+ }
+ if glog.V(1) {
+ glog.Infof("directive?: %q", dline)
+ }
+ if p.handleDirective(dline, makeDirectives) {
+ return
+ }
+ if glog.V(1) {
+ glog.Infof("rule or assign?: %q", line)
+ }
+ p.handleRuleOrAssign(line)
+}
+
+func (p *parser) processDefine(line []byte) {
+ line = append(line, '\n')
+ line = concatline(line)
+ if line[len(line)-1] != '\n' {
+ line = append(line, '\n')
+ }
+ if glog.V(1) {
+ glog.Infof("concatline:%q", line)
+ }
+ if !p.isEndef(line) {
+ p.inDef = append(p.inDef, line...)
+ if p.inDef == nil {
+ p.inDef = []byte{}
+ }
+ return
+ }
+ if p.inDef[len(p.inDef)-1] == '\n' {
+ p.inDef = p.inDef[:len(p.inDef)-1]
+ }
+ glog.V(1).Infof("multilineAssign %q %q", p.defineVar, p.inDef)
+ aast, err := newAssignAST(p, p.defineVar, p.inDef, "=")
+ if err != nil {
+ p.err = p.srcpos().errorf("assign error %q=%q: %v", p.defineVar, p.inDef, err)
+ return
+ }
+ aast.srcpos = p.srcpos()
+ aast.srcpos.lineno -= bytes.Count(p.inDef, []byte{'\n'})
+ p.addStatement(aast)
+ p.defineVar = nil
+ p.inDef = nil
+ return
+}
+
+func (p *parser) isEndef(line []byte) bool {
+ if bytes.Equal(line, []byte("endef")) {
+ return true
+ }
+ w, data := firstWord(line)
+ if bytes.Equal(w, []byte("endef")) {
+ data, _ = removeComment(data)
+ data = trimLeftSpaceBytes(data)
+ if len(data) > 0 {
+ warnNoPrefix(p.srcpos(), `extraneous text after "endef" directive`)
+ }
+ return true
+ }
+ return false
+}
+
+func defaultMakefile() (string, error) {
+ candidates := []string{"GNUmakefile", "makefile", "Makefile"}
+ for _, filename := range candidates {
+ if exists(filename) {
+ return filename, nil
+ }
+ }
+ return "", errors.New("no targets specified and no makefile found")
+}
+
// parseMakefileReader parses makefile text from rd.  loc names the
// original file and starting line; line numbers are fixed to
// loc.lineno (used for makefile text embedded in strings).
func parseMakefileReader(rd io.Reader, loc srcpos) (makefile, error) {
	parser := newParser(rd, loc.filename)
	parser.lineno = loc.lineno
	parser.elineno = loc.lineno
	parser.linenoFixed = true
	return parser.parse()
}

// parseMakefileString parses makefile text held in a string.
func parseMakefileString(s string, loc srcpos) (makefile, error) {
	return parseMakefileReader(strings.NewReader(s), loc)
}

// parseMakefileBytes parses makefile text held in a byte slice.
func parseMakefileBytes(s []byte, loc srcpos) (makefile, error) {
	return parseMakefileReader(bytes.NewReader(s), loc)
}
+
// mkCacheEntry is one cached parse result.
type mkCacheEntry struct {
	mk   makefile
	hash [sha1.Size]byte // sha1 of the file content that was parsed
	err  error
	ts   int64 // unix time when the entry was stored
}

// makefileCacheT caches parsed makefiles, keyed by filename.
type makefileCacheT struct {
	mu sync.Mutex
	mk map[string]mkCacheEntry
}

// makefileCache is the process-wide parsed-makefile cache.
var makefileCache = &makefileCacheT{
	mk: make(map[string]mkCacheEntry),
}
+
// lookup returns the cached parse of filename.  The entry counts as
// valid only when the file's timestamp is available and strictly
// older than the time the entry was stored; otherwise the third
// result is false and the caller must reparse.
func (mc *makefileCacheT) lookup(filename string) (makefile, [sha1.Size]byte, bool, error) {
	var hash [sha1.Size]byte
	mc.mu.Lock()
	c, present := mc.mk[filename]
	mc.mu.Unlock()
	if !present {
		return makefile{}, hash, false, nil
	}
	ts := getTimestamp(filename)
	// ts < 0 presumably means the file is missing/unreadable (see
	// getTimestamp); ts >= c.ts means it changed at/after caching.
	if ts < 0 || ts >= c.ts {
		return makefile{}, hash, false, nil
	}
	return c.mk, c.hash, true, c.err
}
+
+func (mc *makefileCacheT) parse(filename string) (makefile, [sha1.Size]byte, error) {
+ glog.Infof("parse Makefile %q", filename)
+ mk, hash, ok, err := makefileCache.lookup(filename)
+ if ok {
+ if glog.V(1) {
+ glog.Infof("makefile cache hit for %q", filename)
+ }
+ return mk, hash, err
+ }
+ if glog.V(1) {
+ glog.Infof("reading makefile %q", filename)
+ }
+ c, err := ioutil.ReadFile(filename)
+ if err != nil {
+ return makefile{}, hash, err
+ }
+ hash = sha1.Sum(c)
+ mk, err = parseMakefile(c, filename)
+ if err != nil {
+ return makefile{}, hash, err
+ }
+ makefileCache.mu.Lock()
+ makefileCache.mk[filename] = mkCacheEntry{
+ mk: mk,
+ hash: hash,
+ err: err,
+ ts: time.Now().Unix(),
+ }
+ makefileCache.mu.Unlock()
+ return mk, hash, err
+}
+
+func parseMakefile(s []byte, filename string) (makefile, error) {
+ parser := newParser(bytes.NewReader(s), filename)
+ return parser.parse()
+}
diff --git a/golang/kati/pathutil.go b/golang/kati/pathutil.go
new file mode 100644
index 0000000..ad11c22
--- /dev/null
+++ b/golang/kati/pathutil.go
@@ -0,0 +1,945 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "sync"
+ "syscall"
+
+ "github.com/golang/glog"
+)
+
// fileid identifies a file by device and inode number.
type fileid struct {
	dev, ino uint64
}

var (
	// unknownFileid is the zero value: the id is not known yet.
	unknownFileid = fileid{}
	// invalidFileid marks a path that could not be opened or stat'd.
	invalidFileid = fileid{dev: 1<<64 - 1, ino: 1<<64 - 1}
)

// dirent is one cached directory entry.
type dirent struct {
	id    fileid
	name  string
	lmode os.FileMode // mode of the entry itself (Lstat)
	mode  os.FileMode // mode of the resolved target for symlinks (Stat)
	// add other fields to support more find commands?
}

// fsCacheT caches directory listings, used to emulate $(wildcard), find
// and findleaves without touching the file system repeatedly.
type fsCacheT struct {
	mu      sync.Mutex // guards ids and dirents
	ids     map[string]fileid
	dirents map[fileid][]dirent
}

// fsCache is the process-wide directory cache. invalidFileid is
// pre-seeded with a nil listing so unreadable paths resolve to "no
// entries".
var fsCache = &fsCacheT{
	ids: make(map[string]fileid),
	dirents: map[fileid][]dirent{
		invalidFileid: nil,
	},
}
+
// init warms the cache with the current directory's listing.
func init() {
	fsCache.readdir(".", unknownFileid)
}
+
+func (c *fsCacheT) dirs() int {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ return len(c.dirents)
+}
+
+func (c *fsCacheT) files() int {
+ c.mu.Lock()
+ defer c.mu.Unlock()
+ n := 0
+ for _, ents := range c.dirents {
+ n += len(ents)
+ }
+ return n
+}
+
// hasWildcardMeta reports whether pat contains any glob metacharacter
// (*, ? or [).
func hasWildcardMeta(pat string) bool {
	return strings.ContainsAny(pat, "*?[")
}
+
// hasWildcardMetaByte reports whether pat contains any glob
// metacharacter (*, ? or [).
func hasWildcardMetaByte(pat []byte) bool {
	return bytes.ContainsAny(pat, "*?[")
}
+
// wildcardUnescape normalizes backslash escapes for filepath.Match:
// a backslash before a metacharacter (*, ?, [, \) is kept, a backslash
// before any other character is dropped, and a trailing backslash is
// kept as-is.
func wildcardUnescape(pat string) string {
	var sb strings.Builder
	for i := 0; i < len(pat); i++ {
		c := pat[i]
		if c != '\\' || i+1 >= len(pat) {
			sb.WriteByte(c)
			continue
		}
		switch pat[i+1] {
		case '*', '?', '[', '\\':
			sb.WriteByte(c) // meaningful escape: keep the backslash
		}
		// otherwise drop the backslash; the escaped char is written
		// on the next iteration.
	}
	return sb.String()
}
+
// filepathJoin joins names with "/" without cleaning the result (unlike
// filepath.Join): empty components are kept verbatim, an existing
// trailing slash on a component is not doubled, and a trailing slash on
// the last component is preserved.
//
// Fix: build with strings.Builder instead of quadratic string
// concatenation in the loop.
func filepathJoin(names ...string) string {
	var b strings.Builder
	for i, n := range names {
		b.WriteString(n)
		// add a separator unless this is the last, empty, or already
		// slash-terminated component.
		if i != len(names)-1 && n != "" && n[len(n)-1] != '/' {
			b.WriteByte('/')
		}
	}
	return b.String()
}
+
// filepathClean is like filepath.Clean but symlink-aware: "x/.." is
// collapsed only when x is known (via the fs cache) not to be a
// symlinked directory, so the cleaned path resolves to the same file as
// the original. Unknown or unreadable parents conservatively keep "..".
func filepathClean(path string) string {
	var names []string
	if filepath.IsAbs(path) {
		// leading "" keeps the root when rejoined.
		names = append(names, "")
	}
	paths := strings.Split(path, string(filepath.Separator))
Loop:
	for _, n := range paths {
		if n == "" || n == "." {
			continue Loop
		}
		if n == ".." && len(names) > 0 {
			dir, last := names[:len(names)-1], names[len(names)-1]
			parent := strings.Join(dir, string(filepath.Separator))
			if parent == "" {
				parent = "."
			}
			_, ents := fsCache.readdir(parent, unknownFileid)
			for _, e := range ents {
				if e.name != last {
					continue
				}
				if e.lmode&os.ModeSymlink == os.ModeSymlink && e.mode&os.ModeDir == os.ModeDir {
					// preserve .. if last is symlink dir.
					names = append(names, "..")
					continue Loop
				}
				// last is not symlink, maybe safe to clean.
				names = names[:len(names)-1]
				continue Loop
			}
			// parent doesn't exist? preserve ..
			names = append(names, "..")
			continue Loop
		}
		names = append(names, n)
	}
	if len(names) == 0 {
		return "."
	}
	return strings.Join(names, string(filepath.Separator))
}
+
// fileid returns the cached id for dir (the zero fileid if dir has not
// been read yet).
func (c *fsCacheT) fileid(dir string) fileid {
	c.mu.Lock()
	id := c.ids[dir]
	c.mu.Unlock()
	return id
}
+
// readdir returns the (possibly cached) listing of dir along with dir's
// fileid. id is an optional hint from a previous dirent; pass
// unknownFileid when not known. Unreadable directories are recorded as
// invalidFileid with a nil listing. I/O errors on individual entries
// are logged and degrade that entry rather than failing the listing.
func (c *fsCacheT) readdir(dir string, id fileid) (fileid, []dirent) {
	glog.V(3).Infof("readdir: %s [%v]", dir, id)
	c.mu.Lock()
	if id == unknownFileid {
		id = c.ids[dir]
	}
	ents, ok := c.dirents[id]
	c.mu.Unlock()
	if ok {
		// cache hit (including the pre-seeded invalidFileid -> nil).
		return id, ents
	}
	glog.V(3).Infof("opendir: %s", dir)
	d, err := os.Open(dir)
	if err != nil {
		c.mu.Lock()
		c.ids[dir] = invalidFileid
		c.mu.Unlock()
		return invalidFileid, nil
	}
	defer d.Close()
	fi, err := d.Stat()
	if err != nil {
		c.mu.Lock()
		c.ids[dir] = invalidFileid
		c.mu.Unlock()
		return invalidFileid, nil
	}
	if stat, ok := fi.Sys().(*syscall.Stat_t); ok {
		id = fileid{dev: uint64(stat.Dev), ino: stat.Ino}
	}
	names, _ := d.Readdirnames(-1)
	// need sort?
	ents = nil
	var path string
	for _, name := range names {
		path = filepath.Join(dir, name)
		fi, err := os.Lstat(path)
		if err != nil {
			glog.Warningf("readdir %s: %v", name, err)
			ents = append(ents, dirent{name: name})
			continue
		}
		lmode := fi.Mode()
		mode := lmode
		var id fileid
		if stat, ok := fi.Sys().(*syscall.Stat_t); ok {
			id = fileid{dev: uint64(stat.Dev), ino: stat.Ino}
		}
		if lmode&os.ModeSymlink == os.ModeSymlink {
			// symlink: record the target's mode and id too.
			fi, err = os.Stat(path)
			if err != nil {
				glog.Warningf("readdir %s: %v", name, err)
			} else {
				mode = fi.Mode()
				if stat, ok := fi.Sys().(*syscall.Stat_t); ok {
					id = fileid{dev: uint64(stat.Dev), ino: stat.Ino}
				}
			}
		}
		ents = append(ents, dirent{id: id, name: name, lmode: lmode, mode: mode})
	}
	glog.V(3).Infof("readdir:%s => %v: %v", dir, id, ents)
	c.mu.Lock()
	c.ids[dir] = id
	c.dirents[id] = ents
	c.mu.Unlock()
	return id, ents
}
+
// glob searches for files matching pattern in the directory dir
// and appends them to matches. ignore I/O errors.
// Only filepath.Match errors (malformed pattern) are reported.
func (c *fsCacheT) glob(dir, pattern string, matches []string) ([]string, error) {
	_, ents := c.readdir(filepathClean(dir), unknownFileid)
	switch dir {
	case "", string(filepath.Separator):
		// nothing
	default:
		dir += string(filepath.Separator) // add trailing separator back
	}
	for _, ent := range ents {
		matched, err := filepath.Match(pattern, ent.name)
		if err != nil {
			return nil, err
		}
		if matched {
			matches = append(matches, dir+ent.name)
		}
	}
	return matches, nil
}
+
// Glob expands the glob pattern pat against the cached file system,
// recursing when the directory part itself contains metacharacters.
func (c *fsCacheT) Glob(pat string) ([]string, error) {
	// TODO(ukai): expand ~ to user's home directory.
	// TODO(ukai): use find cache for glob if exists
	// or use wildcardCache for find cache.
	pat = wildcardUnescape(pat)
	dir, file := filepath.Split(pat)
	switch dir {
	case "", string(filepath.Separator):
		// nothing
	default:
		dir = dir[:len(dir)-1] // chop off trailing separator
	}
	if !hasWildcardMeta(dir) {
		return c.glob(dir, file, nil)
	}

	// the directory part has wildcards: expand it first, then glob the
	// file part inside each match.
	m, err := c.Glob(dir)
	if err != nil {
		return nil, err
	}
	var matches []string
	for _, d := range m {
		matches, err = c.glob(d, file, matches)
		if err != nil {
			return nil, err
		}
	}
	return matches, nil
}
+
// wildcard writes each file matching pat to w as one word, emulating
// make's $(wildcard) via the fs cache.
func wildcard(w evalWriter, pat string) error {
	files, err := fsCache.Glob(pat)
	if err != nil {
		return err
	}
	for _, file := range files {
		w.writeWordString(file)
	}
	return nil
}
+
// findOp is a single find(1) predicate. apply evaluates it for ent at
// path (writing output to w if the op prints) and returns whether the
// predicate matched (test) and whether to stop descending (prune).
type findOp interface {
	apply(evalWriter, string, dirent) (test bool, prune bool)
}

// findOpName implements find's -name: glob match on the base name.
type findOpName string

func (op findOpName) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	matched, err := filepath.Match(string(op), ent.name)
	if err != nil {
		// malformed pattern: log and treat as no match.
		glog.Warningf("find -name %q: %v", string(op), err)
		return false, false
	}
	return matched, false
}
+
// findOpType implements find's -type for b/c/d/p/l/s (mode-bit kinds).
type findOpType struct {
	mode           os.FileMode
	followSymlinks bool
}

func (op findOpType) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	mode := ent.lmode
	if op.followSymlinks && ent.mode != 0 {
		// -L: test the symlink target's mode instead.
		mode = ent.mode
	}
	return op.mode&mode == op.mode, false
}

// findOpRegular implements find's -type f (regular file).
type findOpRegular struct {
	followSymlinks bool
}

func (op findOpRegular) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	mode := ent.lmode
	if op.followSymlinks && ent.mode != 0 {
		mode = ent.mode
	}
	return mode.IsRegular(), false
}
+
// findOpNot negates its operand (find's -not / \!). prune propagates.
type findOpNot struct {
	op findOp
}

func (op findOpNot) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	test, prune := op.op.apply(w, path, ent)
	return !test, prune
}

// findOpAnd evaluates operands left to right, short-circuiting on the
// first false test; prune is sticky across evaluated operands.
type findOpAnd []findOp

func (op findOpAnd) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	var prune bool
	for _, o := range op {
		test, p := o.apply(w, path, ent)
		if p {
			prune = true
		}
		if !test {
			return test, prune
		}
	}
	return true, prune
}

// findOpOr short-circuits on the first true test (find's -o / -or).
type findOpOr struct {
	op1, op2 findOp
}

func (op findOpOr) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	test, prune := op.op1.apply(w, path, ent)
	if test {
		return test, prune
	}
	return op.op2.apply(w, path, ent)
}
+
// findOpPrune implements find's -prune: always matches and stops the
// descent into directories.
type findOpPrune struct{}

func (op findOpPrune) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	return true, true
}

// findOpPrint implements find's -print: writes the entry's path to w
// as one word.
type findOpPrint struct{}

func (op findOpPrint) apply(w evalWriter, path string, ent dirent) (bool, bool) {
	var name string
	if path == "" {
		name = ent.name
	} else if ent.name == "." {
		// the root of the walk prints as the path itself.
		name = path
	} else {
		name = filepathJoin(path, ent.name)
	}
	glog.V(3).Infof("find print: %s", name)
	w.writeWordString(name)
	return true, false
}
+
// find applies fc's predicates to every entry under path (relative to
// fc.chdir), recursing depth-first up to fc.depth. seen maps fileids of
// visited symlinked directories to their paths, used to detect file
// system loops when following symlinks.
func (c *fsCacheT) find(w evalWriter, fc findCommand, path string, id fileid, depth int, seen map[fileid]string) {
	glog.V(2).Infof("find: path:%s id:%v depth:%d", path, id, depth)
	id, ents := c.readdir(filepathClean(filepathJoin(fc.chdir, path)), id)
	if ents == nil {
		glog.V(1).Infof("find: %s %s not found", fc.chdir, path)
		return
	}
	for _, ent := range ents {
		glog.V(3).Infof("find: path:%s ent:%s depth:%d", path, ent.name, depth)
		_, prune := fc.apply(w, path, ent)
		mode := ent.lmode
		if fc.followSymlinks {
			if mode&os.ModeSymlink == os.ModeSymlink {
				lpath := filepathJoin(path, ent.name)
				if p, ok := seen[ent.id]; ok {
					// stderr?
					glog.Errorf("find: File system loop detected; `%s' is part of the same file system loop as `%s'.", lpath, p)
					return
				}
				seen[ent.id] = lpath
			}
			mode = ent.mode
		}
		if !mode.IsDir() {
			glog.V(3).Infof("find: not dir: %s/%s", path, ent.name)
			continue
		}
		if prune {
			glog.V(3).Infof("find: prune: %s", path)
			continue
		}
		if depth >= fc.depth {
			// -maxdepth reached.
			glog.V(3).Infof("find: depth: %d >= %d", depth, fc.depth)
			continue
		}
		c.find(w, fc, filepathJoin(path, ent.name), ent.id, depth+1, seen)
	}
}
+
// findCommand is a parsed, emulatable find(1) invocation.
type findCommand struct {
	testdir        string // before chdir
	chdir          string
	finddirs       []string // after chdir
	followSymlinks bool     // find -L
	ops            []findOp // predicates, applied in order
	depth          int      // -maxdepth (max int32 when unset)
}
+
// parseFindCommand parses a shell command line as an emulatable find
// invocation (optionally wrapped in "cd ... &&", "test -d ... &&" or
// "if [ -d ... ]; then ...; fi"). It returns errNotFind when cmd is not
// such a command, and other errFind* errors when emulation must fall
// back to a real shell.
func parseFindCommand(cmd string) (findCommand, error) {
	if !strings.Contains(cmd, "find") {
		return findCommand{}, errNotFind
	}
	fcp := findCommandParser{
		shellParser: shellParser{
			cmd: cmd,
		},
	}
	err := fcp.parse()
	if err != nil {
		return fcp.fc, err
	}
	if len(fcp.fc.finddirs) == 0 {
		// find with no directory argument defaults to ".".
		fcp.fc.finddirs = append(fcp.fc.finddirs, ".")
	}
	if fcp.fc.chdir != "" {
		fcp.fc.chdir = filepathClean(fcp.fc.chdir)
	}
	if filepath.IsAbs(fcp.fc.chdir) {
		return fcp.fc, errFindAbspath
	}
	for _, dir := range fcp.fc.finddirs {
		if filepath.IsAbs(dir) {
			return fcp.fc, errFindAbspath
		}
	}
	glog.V(3).Infof("find command: %#v", fcp.fc)

	// TODO(ukai): handle this in run() instead of fallback shell.
	_, ents := fsCache.readdir(filepathClean(fcp.fc.testdir), unknownFileid)
	if ents == nil {
		glog.V(1).Infof("find: testdir %s - not dir", fcp.fc.testdir)
		return fcp.fc, errFindNoSuchDir
	}
	_, ents = fsCache.readdir(filepathClean(fcp.fc.chdir), unknownFileid)
	if ents == nil {
		glog.V(1).Infof("find: cd %s: No such file or directory", fcp.fc.chdir)
		return fcp.fc, errFindNoSuchDir
	}

	return fcp.fc, nil
}
+
// run executes the find emulation, writing matched paths to w. Each
// root directory is applied as the synthetic entry "." first, then
// walked recursively.
func (fc findCommand) run(w evalWriter) {
	glog.V(3).Infof("find: %#v", fc)
	for _, dir := range fc.finddirs {
		seen := make(map[fileid]string)
		id, _ := fsCache.readdir(filepathClean(filepathJoin(fc.chdir, dir)), unknownFileid)
		_, prune := fc.apply(w, dir, dirent{id: id, name: ".", mode: os.ModeDir, lmode: os.ModeDir})
		if prune {
			glog.V(3).Infof("find: prune: %s", dir)
			continue
		}
		if 0 >= fc.depth {
			// -maxdepth 0: only the root itself.
			glog.V(3).Infof("find: depth: 0 >= %d", fc.depth)
			continue
		}
		fsCache.find(w, fc, dir, id, 1, seen)
	}
}
+
// apply runs fc.ops in order against ent, short-circuiting on the first
// false test; prune is sticky across the ops that did run.
func (fc findCommand) apply(w evalWriter, path string, ent dirent) (test, prune bool) {
	var p bool
	for _, op := range fc.ops {
		test, p = op.apply(w, path, ent)
		if p {
			prune = true
		}
		if !test {
			break
		}
	}
	glog.V(2).Infof("apply path:%s ent:%v => test=%t, prune=%t", path, ent, test, prune)
	return test, prune
}
+
// Errors that make find emulation fall back to a real shell (or report
// that cmd is not a find command at all).
var (
	errNotFind             = errors.New("not find command")
	errFindBackground      = errors.New("find command: background")
	errFindUnbalancedQuote = errors.New("find command: unbalanced quote")
	errFindDupChdir        = errors.New("find command: dup chdir")
	errFindDupTestdir      = errors.New("find command: dup testdir")
	errFindExtra           = errors.New("find command: extra")
	errFindUnexpectedEnd   = errors.New("find command: unexpected end")
	errFindAbspath         = errors.New("find command: abs path")
	errFindNoSuchDir       = errors.New("find command: no such dir")
)

// findCommandParser builds a findCommand from a tokenized shell line.
type findCommandParser struct {
	fc findCommand
	shellParser
}
+
// parse consumes the whole command line: optional "cd", "test -d" or
// "if [ -d ... ]" wrappers followed by exactly one find invocation with
// nothing after it (apart from a closing "fi").
func (p *findCommandParser) parse() error {
	p.fc.depth = 1<<31 - 1 // max int32
	var hasIf bool
	var hasFind bool
	for {
		tok, err := p.token()
		if err == io.EOF || tok == "" {
			if !hasFind {
				return errNotFind
			}
			return nil
		}
		if err != nil {
			return err
		}
		switch tok {
		case "cd":
			if p.fc.chdir != "" {
				return errFindDupChdir
			}
			p.fc.chdir, err = p.token()
			if err != nil {
				return err
			}
			err = p.expect(";", "&&")
			if err != nil {
				return err
			}
		case "if":
			err = p.expect("[")
			if err != nil {
				return err
			}
			if hasIf {
				return errFindDupTestdir
			}
			err = p.parseTest()
			if err != nil {
				return err
			}
			err = p.expectSeq("]", ";", "then")
			if err != nil {
				return err
			}
			hasIf = true
		case "test":
			if hasIf {
				return errFindDupTestdir
			}
			err = p.parseTest()
			if err != nil {
				return err
			}
			err = p.expect("&&")
			if err != nil {
				return err
			}
		case "find":
			err = p.parseFind()
			if err != nil {
				return err
			}
			if hasIf {
				err = p.expect("fi")
				if err != nil {
					return err
				}
			}
			// anything after the find invocation is unsupported.
			tok, err = p.token()
			if err != io.EOF || tok != "" {
				return errFindExtra
			}
			hasFind = true
			return nil
		}
	}
}
+
// parseTest parses the "-d <dir>" part of a test/if wrapper and records
// the directory in fc.testdir.
func (p *findCommandParser) parseTest() error {
	if p.fc.testdir != "" {
		return errFindDupTestdir
	}
	err := p.expect("-d")
	if err != nil {
		return err
	}
	p.fc.testdir, err = p.token()
	return err
}
+
// parseFind parses find's arguments: bare tokens are directories, "-"
// or "\(" tokens start a predicate expression. An implicit -print is
// appended if the expression doesn't already end with one.
func (p *findCommandParser) parseFind() error {
	for {
		tok, err := p.token()
		if err == io.EOF || tok == "" || tok == ";" {
			var print findOpPrint
			if len(p.fc.ops) == 0 || p.fc.ops[len(p.fc.ops)-1] != print {
				p.fc.ops = append(p.fc.ops, print)
			}
			return nil
		}
		if err != nil {
			return err
		}
		if tok != "" && (tok[0] == '-' || tok == "\\(") {
			p.unget(tok)
			op, err := p.parseFindCond()
			if err != nil {
				return err
			}
			if op != nil {
				p.fc.ops = append(p.fc.ops, op)
			}
			continue
		}
		p.fc.finddirs = append(p.fc.finddirs, tok)
	}
}
+
// parseFindCond parses one predicate expression (grammar entry point).
func (p *findCommandParser) parseFindCond() (findOp, error) {
	return p.parseExpr()
}
+
// parseExpr parses term { (-or|-o) term }, building left-associative
// findOpOr nodes.
func (p *findCommandParser) parseExpr() (findOp, error) {
	op, err := p.parseTerm()
	if err != nil {
		return nil, err
	}
	if op == nil {
		return nil, nil
	}
	for {
		tok, err := p.token()
		if err == io.EOF || tok == "" {
			return op, nil
		}
		if err != nil {
			return nil, err
		}
		if tok != "-or" && tok != "-o" {
			// not ours; let the caller see it.
			p.unget(tok)
			return op, nil
		}
		op2, err := p.parseTerm()
		if err != nil {
			return nil, err
		}
		op = findOpOr{op, op2}
	}
}
+
// parseTerm parses fact { [-and|-a] fact } — juxtaposition is an
// implicit -and, as in find(1). A single fact is returned unwrapped;
// multiple facts become a findOpAnd.
func (p *findCommandParser) parseTerm() (findOp, error) {
	op, err := p.parseFact()
	if err != nil {
		return nil, err
	}
	if op == nil {
		return nil, nil
	}
	var ops []findOp
	ops = append(ops, op)
	for {
		tok, err := p.token()
		if err == io.EOF || tok == "" {
			if len(ops) == 1 {
				return ops[0], nil
			}
			return findOpAnd(ops), nil
		}
		if err != nil {
			return nil, err
		}
		if tok != "-and" && tok != "-a" {
			// implicit -and: push the token back for parseFact.
			p.unget(tok)
		}
		op, err = p.parseFact()
		if err != nil {
			return nil, err
		}
		if op == nil {
			if len(ops) == 1 {
				return ops[0], nil
			}
			return findOpAnd(ops), nil
		}
		ops = append(ops, op) // findAndOp?
	}
}
+
+func (p *findCommandParser) parseFact() (findOp, error) {
+ tok, err := p.token()
+ if err != nil {
+ return nil, err
+ }
+ switch tok {
+ case "-L":
+ p.fc.followSymlinks = true
+ return nil, nil
+ case "-prune":
+ return findOpPrune{}, nil
+ case "-print":
+ return findOpPrint{}, nil
+ case "-maxdepth":
+ tok, err = p.token()
+ if err != nil {
+ return nil, err
+ }
+ i, err := strconv.ParseInt(tok, 10, 32)
+ if err != nil {
+ return nil, err
+ }
+ if i < 0 {
+ return nil, fmt.Errorf("find commnad: -maxdepth negative: %d", i)
+ }
+ p.fc.depth = int(i)
+ return nil, nil
+ case "-not", "\\!":
+ op, err := p.parseFact()
+ if err != nil {
+ return nil, err
+ }
+ return findOpNot{op}, nil
+ case "\\(":
+ op, err := p.parseExpr()
+ if err != nil {
+ return nil, err
+ }
+ err = p.expect("\\)")
+ if err != nil {
+ return nil, err
+ }
+ return op, nil
+ case "-name":
+ tok, err = p.token()
+ if err != nil {
+ return nil, err
+ }
+ return findOpName(tok), nil
+ case "-type":
+ tok, err = p.token()
+ if err != nil {
+ return nil, err
+ }
+ var m os.FileMode
+ switch tok {
+ case "b":
+ m = os.ModeDevice
+ case "c":
+ m = os.ModeDevice | os.ModeCharDevice
+ case "d":
+ m = os.ModeDir
+ case "p":
+ m = os.ModeNamedPipe
+ case "l":
+ m = os.ModeSymlink
+ case "f":
+ return findOpRegular{p.fc.followSymlinks}, nil
+ case "s":
+ m = os.ModeSocket
+ default:
+ return nil, fmt.Errorf("find command: unsupported -type %s", tok)
+ }
+ return findOpType{m, p.fc.followSymlinks}, nil
+ case "-o", "-or", "-a", "-and":
+ p.unget(tok)
+ return nil, nil
+ default:
+ if tok != "" && tok[0] == '-' {
+ return nil, fmt.Errorf("find command: unsupported %s", tok)
+ }
+ p.unget(tok)
+ return nil, nil
+ }
+}
+
// findleavesCommand is a parsed build/tools/findleaves.py invocation.
type findleavesCommand struct {
	name     string   // filename to look for
	dirs     []string // roots to search
	prunes   []string // directory basenames to skip
	mindepth int      // --mindepth (-1 when unset)
}

// parseFindleavesCommand parses cmd as a findleaves.py invocation;
// returns errNotFindleaves when it is not one.
func parseFindleavesCommand(cmd string) (findleavesCommand, error) {
	if !strings.Contains(cmd, "build/tools/findleaves.py") {
		return findleavesCommand{}, errNotFindleaves
	}
	fcp := findleavesCommandParser{
		shellParser: shellParser{
			cmd: cmd,
		},
	}
	err := fcp.parse()
	if err != nil {
		return fcp.fc, err
	}
	glog.V(3).Infof("findleaves command: %#v", fcp.fc)
	return fcp.fc, nil
}
+
// run walks each root directory, writing matched paths to w.
func (fc findleavesCommand) run(w evalWriter) {
	glog.V(3).Infof("findleaves: %#v", fc)
	for _, dir := range fc.dirs {
		seen := make(map[fileid]string)
		id, _ := fsCache.readdir(filepathClean(dir), unknownFileid)
		fc.walk(w, dir, id, 1, seen)
	}
}
+
+func (fc findleavesCommand) walk(w evalWriter, dir string, id fileid, depth int, seen map[fileid]string) {
+ glog.V(3).Infof("findleaves walk: dir:%d id:%v depth:%d", dir, id, depth)
+ id, ents := fsCache.readdir(filepathClean(dir), id)
+ var subdirs []dirent
+ for _, ent := range ents {
+ if ent.mode.IsDir() {
+ if fc.isPrune(ent.name) {
+ glog.V(3).Infof("findleaves prune %s in %s", ent.name, dir)
+ continue
+ }
+ subdirs = append(subdirs, ent)
+ continue
+ }
+ if depth < fc.mindepth {
+ glog.V(3).Infof("findleaves depth=%d mindepth=%d", depth, fc.mindepth)
+ continue
+ }
+ if ent.name == fc.name {
+ glog.V(2).Infof("findleaves %s in %s", ent.name, dir)
+ w.writeWordString(filepathJoin(dir, ent.name))
+ // no recurse subdirs
+ return
+ }
+ }
+ for _, subdir := range subdirs {
+ if subdir.lmode&os.ModeSymlink == os.ModeSymlink {
+ lpath := filepathJoin(dir, subdir.name)
+ if p, ok := seen[subdir.id]; ok {
+ // symlink loop detected.
+ glog.Errorf("findleaves: loop detected %q was %q", lpath, p)
+ continue
+ }
+ seen[subdir.id] = lpath
+ }
+ fc.walk(w, filepathJoin(dir, subdir.name), subdir.id, depth+1, seen)
+ }
+}
+
+func (fc findleavesCommand) isPrune(name string) bool {
+ for _, p := range fc.prunes {
+ if p == name {
+ return true
+ }
+ }
+ return false
+}
+
// findleaves parse errors.
var (
	errNotFindleaves        = errors.New("not findleaves command")
	errFindleavesEmptyPrune = errors.New("findleaves: empty prune")
	errFindleavesNoFilename = errors.New("findleaves: no filename")
)

// findleavesCommandParser builds a findleavesCommand from a tokenized
// shell line.
type findleavesCommandParser struct {
	fc findleavesCommand
	shellParser
}
+
// parse consumes the findleaves.py invocation: --prune= and --mindepth=
// options are recorded, all other tokens are positional. The last
// positional is the filename, the rest are search roots.
func (p *findleavesCommandParser) parse() error {
	var args []string
	p.fc.mindepth = -1
	tok, err := p.token()
	if err != nil {
		return err
	}
	if tok != "build/tools/findleaves.py" {
		return errNotFindleaves
	}
	for {
		tok, err := p.token()
		if err == io.EOF || tok == "" {
			break
		}
		if err != nil {
			return err
		}
		switch {
		case strings.HasPrefix(tok, "--prune="):
			// findleaves prunes by basename only.
			prune := filepath.Base(strings.TrimPrefix(tok, "--prune="))
			if prune == "" {
				return errFindleavesEmptyPrune
			}
			p.fc.prunes = append(p.fc.prunes, prune)
		case strings.HasPrefix(tok, "--mindepth="):
			md := strings.TrimPrefix(tok, "--mindepth=")
			i, err := strconv.ParseInt(md, 10, 32)
			if err != nil {
				return err
			}
			p.fc.mindepth = int(i)
		default:
			args = append(args, tok)
		}
	}
	if len(args) < 2 {
		return errFindleavesNoFilename
	}
	p.fc.dirs, p.fc.name = args[:len(args)-1], args[len(args)-1]
	return nil
}
diff --git a/golang/kati/pathutil_test.go b/golang/kati/pathutil_test.go
new file mode 100644
index 0000000..33b7e32
--- /dev/null
+++ b/golang/kati/pathutil_test.go
@@ -0,0 +1,800 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "os"
+ "path/filepath"
+ "reflect"
+ "strings"
+ "testing"
+)
+
// mockfs swaps the global fsCache for an in-memory fake for the
// duration of a test; close restores the original.
type mockfs struct {
	id       fileid    // next fileid to hand out (ino is incremented)
	ofscache *fsCacheT // the real cache, restored by close
}

// newFS installs a fresh fake fsCache containing only ".".
func newFS() *mockfs {
	fs := &mockfs{
		ofscache: fsCache,
	}
	fsCache = &fsCacheT{
		ids:     make(map[string]fileid),
		dirents: make(map[fileid][]dirent),
	}
	fsCache.ids["."] = fs.dir(".").id
	return fs
}

// dump logs the fake cache's contents for debugging test failures.
func (m *mockfs) dump(t *testing.T) {
	t.Log("fs ids:")
	for name, id := range fsCache.ids {
		t.Logf(" %q=%v", name, id)
	}
	t.Log("fs dirents:")
	for id, ents := range fsCache.dirents {
		t.Logf(" %v:", id)
		for _, ent := range ents {
			t.Logf("  %#v", ent)
		}
	}
}

// close restores the real fsCache.
func (m *mockfs) close() {
	fsCache = m.ofscache
}

// dirent mints a new entry with a unique fileid and the given mode.
func (m *mockfs) dirent(name string, mode os.FileMode) dirent {
	id := m.id
	m.id.ino++
	return dirent{id: id, name: name, mode: mode, lmode: mode}
}
+
// addent registers ent under the path name, creating missing parent
// directories in the fake cache; duplicate names are ignored.
func (m *mockfs) addent(name string, ent dirent) {
	dir, name := filepath.Split(name)
	dir = strings.TrimSuffix(dir, string(filepath.Separator))
	if dir == "" {
		dir = "."
	}
	di, ok := fsCache.ids[dir]
	if !ok {
		if dir == "." {
			panic(". not found:" + name)
		}
		// create the parent directory recursively.
		de := m.add(m.dir, dir)
		fsCache.ids[dir] = de.id
		di = de.id
	}
	for _, e := range fsCache.dirents[di] {
		if e.name == ent.name {
			return
		}
	}
	fsCache.dirents[di] = append(fsCache.dirents[di], ent)
}

// add creates an entry of kind t (m.dir or m.file) at path name.
func (m *mockfs) add(t func(string) dirent, name string) dirent {
	ent := t(filepath.Base(name))
	m.addent(name, ent)
	return ent
}

// symlink registers a symlink at name pointing at ent (target's mode
// and id kept, lmode marked as symlink).
func (m *mockfs) symlink(name string, ent dirent) {
	lent := ent
	lent.lmode = os.ModeSymlink
	lent.name = filepath.Base(name)
	m.addent(name, lent)
}

// dirref returns a dirent referring to an existing directory by path.
func (m *mockfs) dirref(name string) dirent {
	id := fsCache.ids[name]
	return dirent{id: id, name: filepath.Base(name), mode: os.ModeDir, lmode: os.ModeDir}
}

// Shorthand entry constructors: a dangling target, a directory, a file.
func (m *mockfs) notfound() dirent          { return dirent{id: invalidFileid} }
func (m *mockfs) dir(name string) dirent    { return m.dirent(name, os.ModeDir) }
func (m *mockfs) file(name string) dirent   { return m.dirent(name, os.FileMode(0644)) }
+
// TestFilepathClean checks the symlink-aware cleaning rules: ".." is
// collapsed through real directories, preserved through symlinks and
// unknown parents.
func TestFilepathClean(t *testing.T) {
	fs := newFS()
	defer fs.close()
	di := fs.add(fs.dir, "dir")
	fs.symlink("link", di)

	fs.dump(t)

	for _, tc := range []struct {
		path string
		want string
	}{
		{path: "foo", want: "foo"},
		{path: ".", want: "."},
		{path: "./", want: "."},
		{path: ".///", want: "."},
		{path: "", want: "."},
		{path: "foo/bar", want: "foo/bar"},
		{path: "./foo", want: "foo"},
		{path: "foo///", want: "foo"},
		{path: "foo//bar", want: "foo/bar"},
		{path: "foo/../bar", want: "foo/../bar"}, // foo doesn't exist
		{path: "dir/../bar", want: "bar"},        // dir is real dir
		{path: "link/../bar", want: "link/../bar"}, // link is symlink
		{path: "foo/./bar", want: "foo/bar"},
		{path: "/foo/bar", want: "/foo/bar"},
	} {
		if got, want := filepathClean(tc.path), tc.want; got != want {
			t.Errorf("filepathClean(%q)=%q; want=%q", tc.path, got, want)
		}
	}
}
+
// TestParseFindCommand is a table test over parseFindCommand, covering
// predicates, operators, precedence, wrappers (cd/test/if) and
// -maxdepth; every command here must parse without error and produce
// exactly the expected findCommand.
func TestParseFindCommand(t *testing.T) {
	fs := newFS()
	defer fs.close()
	fs.add(fs.dir, "testdir")

	maxdepth := 1<<31 - 1 // default when -maxdepth is absent
	for _, tc := range []struct {
		cmd  string
		want findCommand
	}{
		{
			cmd: "find testdir",
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: "find .",
			want: findCommand{
				finddirs: []string{"."},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: "find ",
			want: findCommand{
				finddirs: []string{"."},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: "find testdir/../testdir",
			want: findCommand{
				finddirs: []string{"testdir/../testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: "find testdir -print",
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: "find testdir -name foo",
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpName("foo"), findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "file1"`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpName("file1"), findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1"`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpName("*1"), findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -and -name "file*"`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpAnd{findOpName("*1"), findOpName("file*")}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -or -name "file*"`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpName("*1"), findOpName("file*")}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -or -type f`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpName("*1"), findOpRegular{}}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -or -not -type f`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpName("*1"), findOpNot{findOpRegular{}}}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -or \! -type f`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpName("*1"), findOpNot{findOpRegular{}}}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -or -type d`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeDir}}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -or -type l`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeSymlink}}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name "*1" -a -type l -o -name "dir*"`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpAnd([]findOp{findOpName("*1"), findOpType{mode: os.ModeSymlink}}), findOpName("dir*")}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir \( -name "dir*" -o -name "*1" \) -a -type f`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpAnd([]findOp{findOpOr{findOpName("dir*"), findOpName("*1")}, findOpRegular{}}), findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `cd testdir && find`,
			want: findCommand{
				chdir:    "testdir",
				finddirs: []string{"."},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `test -d testdir && find testdir`,
			want: findCommand{
				testdir:  "testdir",
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `if [ -d testdir ] ; then find testdir ; fi`,
			want: findCommand{
				testdir:  "testdir",
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `if [ -d testdir ]; then find testdir; fi`,
			want: findCommand{
				testdir:  "testdir",
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `if [ -d testdir ]; then cd testdir && find .; fi`,
			want: findCommand{
				chdir:    "testdir",
				testdir:  "testdir",
				finddirs: []string{"."},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir -name dir2 -prune -o -name file1`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpOr{findOpAnd([]findOp{findOpName("dir2"), findOpPrune{}}), findOpName("file1")}, findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find testdir testdir`,
			want: findCommand{
				finddirs: []string{"testdir", "testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    maxdepth,
			},
		},
		{
			cmd: `find -L testdir -type f`,
			want: findCommand{
				finddirs:       []string{"testdir"},
				followSymlinks: true,
				ops:            []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}},
				depth:          maxdepth,
			},
		},
		{
			cmd: `cd testdir; find -L . -type f`,
			want: findCommand{
				chdir:          "testdir",
				finddirs:       []string{"."},
				followSymlinks: true,
				ops:            []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}},
				depth:          maxdepth,
			},
		},
		{
			cmd: `find testdir -maxdepth 1`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    1,
			},
		},
		{
			cmd: `find testdir -maxdepth 0`,
			want: findCommand{
				finddirs: []string{"testdir"},
				ops:      []findOp{findOpPrint{}},
				depth:    0,
			},
		},
	} {
		fc, err := parseFindCommand(tc.cmd)
		if err != nil {
			t.Errorf("parseFindCommand(%q)=_, %v; want=_, <nil>", tc.cmd, err)
			continue
		}
		if got, want := fc, tc.want; !reflect.DeepEqual(got, want) {
			t.Errorf("parseFindCommand(%q)=%#v\n want=%#v\n", tc.cmd, got, want)
		}
	}

}
+
// TestParseFindCommandFail checks that malformed -maxdepth arguments
// are rejected.
func TestParseFindCommandFail(t *testing.T) {
	for _, cmd := range []string{
		`find testdir -maxdepth hoge`,
		`find testdir -maxdepth 1hoge`,
		`find testdir -maxdepth -1`,
	} {
		_, err := parseFindCommand(cmd)
		if err == nil {
			t.Errorf("parseFindCommand(%q)=_, <nil>; want=_, err", cmd)
		}
	}
}
+
+// TestFind runs findCommand.run against a scratch filesystem (built by
+// newFS, torn down by fs.close) and compares the space-joined output
+// words against want. Cases cover path normalization of the starting
+// dirs (./, .///, ././, dir/../dir), -name/-and/-or/-not/-type/-prune
+// predicates, chdir/testdir handling, duplicate start dirs, symlink
+// following (-L), and -maxdepth limits.
+func TestFind(t *testing.T) {
+ fs := newFS()
+ defer fs.close()
+ fs.add(fs.file, "Makefile")
+ fs.add(fs.file, "testdir/file1")
+ fs.add(fs.file, "testdir/file2")
+ file1 := fs.add(fs.file, "testdir/dir1/file1")
+ dir1 := fs.dirref("testdir/dir1")
+ fs.add(fs.file, "testdir/dir1/file2")
+ fs.add(fs.file, "testdir/dir2/file1")
+ fs.add(fs.file, "testdir/dir2/file2")
+ // link1 -> a regular file, link2 -> a directory, link3 -> dangling.
+ fs.symlink("testdir/dir2/link1", file1)
+ fs.symlink("testdir/dir2/link2", dir1)
+ fs.symlink("testdir/dir2/link3", fs.notfound())
+
+ fs.dump(t)
+
+ // Effectively unlimited depth (matches find's default).
+ maxdepth := 1<<31 - 1
+ for _, tc := range []struct {
+ fc findCommand
+ want string
+ }{
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"."},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `. ./Makefile ./testdir ./testdir/file1 ./testdir/file2 ./testdir/dir1 ./testdir/dir1/file1 ./testdir/dir1/file2 ./testdir/dir2 ./testdir/dir2/file1 ./testdir/dir2/file2 ./testdir/dir2/link1 ./testdir/dir2/link2 ./testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"./"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `./ ./Makefile ./testdir ./testdir/file1 ./testdir/file2 ./testdir/dir1 ./testdir/dir1/file1 ./testdir/dir1/file2 ./testdir/dir2 ./testdir/dir2/file1 ./testdir/dir2/file2 ./testdir/dir2/link1 ./testdir/dir2/link2 ./testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{".///"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `./// .///Makefile .///testdir .///testdir/file1 .///testdir/file2 .///testdir/dir1 .///testdir/dir1/file1 .///testdir/dir1/file2 .///testdir/dir2 .///testdir/dir2/file1 .///testdir/dir2/file2 .///testdir/dir2/link1 .///testdir/dir2/link2 .///testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"./."},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `./. ././Makefile ././testdir ././testdir/file1 ././testdir/file2 ././testdir/dir1 ././testdir/dir1/file1 ././testdir/dir1/file2 ././testdir/dir2 ././testdir/dir2/file1 ././testdir/dir2/file2 ././testdir/dir2/link1 ././testdir/dir2/link2 ././testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"././"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `././ ././Makefile ././testdir ././testdir/file1 ././testdir/file2 ././testdir/dir1 ././testdir/dir1/file1 ././testdir/dir1/file2 ././testdir/dir2 ././testdir/dir2/file1 ././testdir/dir2/file2 ././testdir/dir2/link1 ././testdir/dir2/link2 ././testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir/../testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/../testdir testdir/../testdir/file1 testdir/../testdir/file2 testdir/../testdir/dir1 testdir/../testdir/dir1/file1 testdir/../testdir/dir1/file2 testdir/../testdir/dir2 testdir/../testdir/dir2/file1 testdir/../testdir/dir2/file2 testdir/../testdir/dir2/link1 testdir/../testdir/dir2/link2 testdir/../testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpName("foo"), findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: ``,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpName("file1"), findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/dir1/file1 testdir/dir2/file1`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpAnd{findOpName("*1"), findOpName("file*")}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/dir1/file1 testdir/dir2/file1`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpName("*1"), findOpName("file*")}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpName("*1"), findOpRegular{}}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpName("*1"), findOpNot{findOpRegular{}}}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir testdir/file1 testdir/dir1 testdir/dir1/file1 testdir/dir2 testdir/dir2/file1 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeDir}}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir testdir/file1 testdir/dir1 testdir/dir1/file1 testdir/dir2 testdir/dir2/file1 testdir/dir2/link1`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpName("*1"), findOpType{mode: os.ModeSymlink}}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/dir1 testdir/dir1/file1 testdir/dir2/file1 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("*1"), findOpType{mode: os.ModeSymlink}}), findOpName("dir*")}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/dir1 testdir/dir2 testdir/dir2/link1`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("*1"), findOpType{mode: os.ModeSymlink}}), findOpName("dir*")}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/dir1 testdir/dir2 testdir/dir2/link1`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpAnd([]findOp{findOpOr{findOpName("dir*"), findOpName("*1")}, findOpRegular{}}), findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/dir1/file1 testdir/dir2/file1`,
+ },
+ {
+ fc: findCommand{
+ chdir: "testdir",
+ finddirs: []string{"."},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `. ./file1 ./file2 ./dir1 ./dir1/file1 ./dir1/file2 ./dir2 ./dir2/file1 ./dir2/file2 ./dir2/link1 ./dir2/link2 ./dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ chdir: "testdir",
+ finddirs: []string{"../testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `../testdir ../testdir/file1 ../testdir/file2 ../testdir/dir1 ../testdir/dir1/file1 ../testdir/dir1/file2 ../testdir/dir2 ../testdir/dir2/file1 ../testdir/dir2/file2 ../testdir/dir2/link1 ../testdir/dir2/link2 ../testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ testdir: "testdir",
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ chdir: "testdir",
+ testdir: "testdir",
+ finddirs: []string{"."},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `. ./file1 ./file2 ./dir1 ./dir1/file1 ./dir1/file2 ./dir2 ./dir2/file1 ./dir2/file2 ./dir2/link1 ./dir2/link2 ./dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpOr{findOpAnd([]findOp{findOpName("dir2"), findOpPrune{}}), findOpName("file1")}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/dir1/file1 testdir/dir2`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir", "testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3 testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`,
+ },
+ // symlink
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ followSymlinks: true,
+ ops: []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/file1 testdir/file2 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2/file1 testdir/dir2/link2/file2`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ followSymlinks: true,
+ ops: []findOp{findOpType{mode: os.ModeDir, followSymlinks: true}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir testdir/dir1 testdir/dir2 testdir/dir2/link2`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ followSymlinks: true,
+ ops: []findOp{findOpType{mode: os.ModeSymlink, followSymlinks: true}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ chdir: "testdir",
+ finddirs: []string{"."},
+ followSymlinks: true,
+ ops: []findOp{findOpRegular{followSymlinks: true}, findOpPrint{}},
+ depth: maxdepth,
+ },
+ want: `./file1 ./file2 ./dir1/file1 ./dir1/file2 ./dir2/file1 ./dir2/file2 ./dir2/link1 ./dir2/link2/file1 ./dir2/link2/file2`,
+ },
+ // maxdepth
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: 1,
+ },
+ want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir2`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: 2,
+ },
+ want: `testdir testdir/file1 testdir/file2 testdir/dir1 testdir/dir1/file1 testdir/dir1/file2 testdir/dir2 testdir/dir2/file1 testdir/dir2/file2 testdir/dir2/link1 testdir/dir2/link2 testdir/dir2/link3`,
+ },
+ {
+ fc: findCommand{
+ finddirs: []string{"testdir"},
+ ops: []findOp{findOpPrint{}},
+ depth: 0,
+ },
+ want: `testdir`,
+ },
+ } {
+ var wb wordBuffer
+ tc.fc.run(&wb)
+ if got, want := wb.buf.String(), tc.want; got != want {
+ t.Errorf("%#v\n got %q\n want %q", tc.fc, got, want)
+ }
+ }
+}
+
+// TestParseFindleavesCommand checks parsing of findleaves.py command
+// lines into a findleavesCommand: the trailing filename, the search
+// dirs, --prune values, and --mindepth (defaulting to -1 when absent).
+func TestParseFindleavesCommand(t *testing.T) {
+ for _, tc := range []struct {
+ cmd string
+ want findleavesCommand
+ }{
+ {
+ cmd: `build/tools/findleaves.py --prune=out --prune=.repo --prune=.git . CleanSpec.mk`,
+ want: findleavesCommand{
+ name: "CleanSpec.mk",
+ dirs: []string{"."},
+ prunes: []string{"out", ".repo", ".git"},
+ mindepth: -1,
+ },
+ },
+ {
+ cmd: `build/tools/findleaves.py --prune=out --prune=.repo --prune=.git --mindepth=2 art bionic Android.mk`,
+ want: findleavesCommand{
+ name: "Android.mk",
+ dirs: []string{"art", "bionic"},
+ prunes: []string{"out", ".repo", ".git"},
+ mindepth: 2,
+ },
+ },
+ } {
+ fc, err := parseFindleavesCommand(tc.cmd)
+ if err != nil {
+ // Fixed truncated "<nil" (missing '>') in the failure message.
+ t.Errorf("parseFindleavesCommand(%q)=_, %v; want=_, <nil>", tc.cmd, err)
+ continue
+ }
+ if got, want := fc, tc.want; !reflect.DeepEqual(got, want) {
+ t.Errorf("parseFindleavesCommand(%q)=%#v\n want=%#v\n", tc.cmd, got, want)
+ }
+ }
+}
+
+// TestFindleaves runs findleavesCommand.run over a scratch source-tree
+// layout and checks: pruned dirs (out, .repo, .git) are skipped, the
+// search stops descending below the first directory containing the
+// target name ("leaves" semantics), and --mindepth suppresses matches
+// that are too shallow.
+func TestFindleaves(t *testing.T) {
+ fs := newFS()
+ defer fs.close()
+
+ fs.add(fs.file, "art/Android.mk")
+ fs.add(fs.file, "art/compiler/Android.mk")
+ fs.add(fs.file, "art/CleanSpec.mk")
+ fs.add(fs.file, "bionic/Android.mk")
+ fs.add(fs.file, "bionic/CleanSpec.mk")
+ fs.add(fs.file, "bootable/recovery/Android.mk")
+ fs.add(fs.file, "bootable/recovery/CleanSpec.mk")
+ fs.add(fs.file, "frameworks/base/Android.mk")
+ fs.add(fs.file, "frameworks/base/CleanSpec.mk")
+ fs.add(fs.file, "frameworks/base/cmds/am/Android.mk")
+ fs.add(fs.file, "frameworks/base/cmds/pm/Android.mk")
+ fs.add(fs.file, "frameworks/base/location/Android.mk")
+ fs.add(fs.file, "frameworks/base/packages/WAPPushManager/CleanSpec.mk")
+ fs.add(fs.file, "out/outputfile")
+ fs.add(fs.file, "art/.git/index")
+ fs.add(fs.file, ".repo/manifests")
+
+ fs.dump(t)
+
+ for _, tc := range []struct {
+ fc findleavesCommand
+ want string
+ }{
+ {
+ fc: findleavesCommand{
+ name: "CleanSpec.mk",
+ dirs: []string{"."},
+ prunes: []string{"out", ".repo", ".git"},
+ mindepth: -1,
+ },
+ want: `./art/CleanSpec.mk ./bionic/CleanSpec.mk ./bootable/recovery/CleanSpec.mk ./frameworks/base/CleanSpec.mk`,
+ },
+ {
+ fc: findleavesCommand{
+ name: "Android.mk",
+ dirs: []string{"art", "bionic", "frameworks/base"},
+ prunes: []string{"out", ".repo", ".git"},
+ mindepth: 2,
+ },
+ want: `art/compiler/Android.mk frameworks/base/cmds/am/Android.mk frameworks/base/cmds/pm/Android.mk frameworks/base/location/Android.mk`,
+ },
+ {
+ fc: findleavesCommand{
+ name: "Android.mk",
+ dirs: []string{"art", "bionic", "frameworks/base"},
+ prunes: []string{"out", ".repo", ".git"},
+ mindepth: 3,
+ },
+ want: `frameworks/base/cmds/am/Android.mk frameworks/base/cmds/pm/Android.mk`,
+ },
+ } {
+ var wb wordBuffer
+ tc.fc.run(&wb)
+ if got, want := wb.buf.String(), tc.want; got != want {
+ t.Errorf("%#v\n got %q\n want %q", tc.fc, got, want)
+ }
+ }
+}
diff --git a/golang/kati/query.go b/golang/kati/query.go
new file mode 100644
index 0000000..5da1f61
--- /dev/null
+++ b/golang/kati/query.go
@@ -0,0 +1,99 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+ "io"
+)
+
+// showDeps writes the dependency tree rooted at n to w, one node per
+// line, indented by depth. Each distinct output is assigned a small
+// integer id via seen; a node already in seen is printed again (so the
+// reference is visible) but its children are not re-expanded, keeping
+// shared subtrees and cycles finite.
+func showDeps(w io.Writer, n *DepNode, indent int, seen map[string]int) {
+ id, present := seen[n.Output]
+ if !present {
+ id = len(seen)
+ seen[n.Output] = id
+ }
+ fmt.Fprintf(w, "%*c%s (%d)\n", indent, ' ', n.Output, id)
+ if present {
+ // Already expanded elsewhere; don't recurse again.
+ return
+ }
+ for _, d := range n.Deps {
+ showDeps(w, d, indent+1, seen)
+ }
+ if len(n.OrderOnlys) > 0 {
+ fmt.Fprintf(w, "%*corder_onlys:\n", indent, ' ')
+ for _, d := range n.OrderOnlys {
+ showDeps(w, d, indent+1, seen)
+ }
+ }
+}
+
+// showNode writes a make-like description of node n to w: the rule
+// line ("output: inputs..."), its commands, target-specific variables,
+// source location, phony flag, and finally the full dependency tree
+// via showDeps.
+func showNode(w io.Writer, n *DepNode) {
+ fmt.Fprintf(w, "%s:", n.Output)
+ for _, i := range n.ActualInputs {
+ fmt.Fprintf(w, " %s", i)
+ }
+ fmt.Fprintf(w, "\n")
+ for _, c := range n.Cmds {
+ fmt.Fprintf(w, "\t%s\n", c)
+ }
+ // NOTE(review): map iteration order is random, so the
+ // target-specific-var lines are emitted in nondeterministic order.
+ for k, v := range n.TargetSpecificVars {
+ fmt.Fprintf(w, "%s: %s=%s\n", n.Output, k, v.String())
+ }
+
+ fmt.Fprintf(w, "\n")
+ fmt.Fprintf(w, "location: %s:%d\n", n.Filename, n.Lineno)
+ if n.IsPhony {
+ fmt.Fprintf(w, "phony: true\n")
+ }
+
+ seen := make(map[string]int)
+ fmt.Fprintf(w, "dependencies:\n")
+ showDeps(w, n, 1, seen)
+}
+
+// handleNodeQuery prints (via showNode) the first node whose output
+// name equals q. Nothing is printed if no node matches.
+func handleNodeQuery(w io.Writer, q string, nodes []*DepNode) {
+ for _, n := range nodes {
+ if n.Output == q {
+ showNode(w, n)
+ break
+ }
+ }
+}
+
+// Query queries q in g and writes the result to w. Three special
+// queries are recognized: "$MAKEFILE_LIST" lists the makefiles read
+// (with their cache state), "$*" dumps all variables, and "*" lists
+// all target outputs. Any other q is treated as a target name and its
+// node is described in full.
+func Query(w io.Writer, q string, g *DepGraph) {
+ if q == "$MAKEFILE_LIST" {
+ for _, mk := range g.accessedMks {
+ fmt.Fprintf(w, "%s: state=%d\n", mk.Filename, mk.State)
+ }
+ return
+ }
+
+ if q == "$*" {
+ // NOTE(review): map iteration order is random; variable dump
+ // order is nondeterministic.
+ for k, v := range g.vars {
+ fmt.Fprintf(w, "%s=%s\n", k, v.String())
+ }
+ return
+ }
+
+ if q == "*" {
+ for _, n := range g.nodes {
+ fmt.Fprintf(w, "%s\n", n.Output)
+ }
+ return
+ }
+ handleNodeQuery(w, q, g.nodes)
+}
diff --git a/golang/kati/rule_parser.go b/golang/kati/rule_parser.go
new file mode 100644
index 0000000..1bea505
--- /dev/null
+++ b/golang/kati/rule_parser.go
@@ -0,0 +1,267 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strings"
+)
+
+// pattern is a make '%' pattern, stored as the text before and after
+// the single '%' (e.g. "%.o" -> prefix "", suffix ".o").
+type pattern struct {
+ prefix, suffix string
+}
+
+// String reconstructs the original pattern text, e.g. "lib%.a".
+func (p pattern) String() string {
+ return p.prefix + "%" + p.suffix
+}
+
+// match reports whether s matches the pattern ('%' matches anything,
+// including the empty string when prefix and suffix overlap checks pass).
+func (p pattern) match(s string) bool {
+ return strings.HasPrefix(s, p.prefix) && strings.HasSuffix(s, p.suffix)
+}
+
+// subst performs patsubst-style substitution: if str matches p, the
+// stem (the part matched by '%') replaces the '%' in repl. If str does
+// not match, str is returned unchanged; if repl contains no '%', repl
+// is returned as-is.
+func (p pattern) subst(repl, str string) string {
+ in := str
+ trimed := str
+ if p.prefix != "" {
+ trimed = strings.TrimPrefix(in, p.prefix)
+ if trimed == in {
+ // prefix did not match.
+ return str
+ }
+ }
+ in = trimed
+ if p.suffix != "" {
+ trimed = strings.TrimSuffix(in, p.suffix)
+ if trimed == in {
+ // suffix did not match.
+ return str
+ }
+ }
+ rs := strings.SplitN(repl, "%", 2)
+ if len(rs) != 2 {
+ // repl has no '%': the replacement is literal.
+ return repl
+ }
+ return rs[0] + trimed + rs[1]
+}
+
+// rule is a parsed makefile rule: its targets (explicit outputs or
+// '%' output patterns), prerequisites, and recipe lines.
+type rule struct {
+ srcpos
+ // outputs is output of the rule.
+ // []string{} for ': xxx'
+ // nil for empty line.
+ outputs []string
+
+ inputs []string
+ orderOnlyInputs []string
+ outputPatterns []pattern
+ isDoubleColon bool
+ isSuffixRule bool
+ cmds []string
+ // cmdLineno is the line number of the first recipe line.
+ cmdLineno int
+}
+
+// cmdpos returns the source position of the rule's first command line.
+func (r *rule) cmdpos() srcpos {
+ return srcpos{filename: r.filename, lineno: r.cmdLineno}
+}
+
+// isPatternRule reports whether s contains a literal '%' and, if so,
+// returns it split into a pattern at the first '%'.
+func isPatternRule(s []byte) (pattern, bool) {
+ i := findLiteralChar(s, '%', 0, noSkipVar)
+ if i < 0 {
+ return pattern{}, false
+ }
+ return pattern{prefix: string(s[:i]), suffix: string(s[i+1:])}, true
+}
+
+// unescapeInput removes backslash escapes from a prerequisite word,
+// in place (the returned slice aliases s).
+// only "\ ", "\=" becomes " ", "=" respectively?
+// other \-escape, such as "\:" keeps "\:".
+func unescapeInput(s []byte) []byte {
+ for i := 0; i < len(s); i++ {
+ if s[i] != '\\' {
+ continue
+ }
+ if i+1 < len(s) && s[i+1] == ' ' || s[i+1] == '=' {
+ copy(s[i:], s[i+1:])
+ s = s[:len(s)-1]
+ }
+ }
+ return s
+}
+
+// unescapeTarget removes every backslash from a target word, in place
+// (any '\' swallows itself and keeps the following byte literally).
+func unescapeTarget(s []byte) []byte {
+ for i := 0; i < len(s); i++ {
+ if s[i] != '\\' {
+ continue
+ }
+ copy(s[i:], s[i+1:])
+ s = s[:len(s)-1]
+ }
+ return s
+}
+
+// parseInputs scans s word by word and fills r.inputs; after a lone
+// "|" word, subsequent words go to r.orderOnlyInputs instead. Words
+// containing wildcard metacharacters are glob-expanded through
+// fsCache; an unmatched glob is kept literally (GNU make behavior).
+func (r *rule) parseInputs(s []byte) {
+ ws := newWordScanner(s)
+ ws.esc = true
+ add := func(t string) {
+ r.inputs = append(r.inputs, t)
+ }
+ for ws.Scan() {
+ input := ws.Bytes()
+ if len(input) == 1 && input[0] == '|' {
+ // Everything after '|' is an order-only prerequisite.
+ add = func(t string) {
+ r.orderOnlyInputs = append(r.orderOnlyInputs, t)
+ }
+ continue
+ }
+ input = unescapeInput(input)
+ if !hasWildcardMetaByte(input) {
+ add(internBytes(input))
+ continue
+ }
+ m, _ := fsCache.Glob(string(input))
+ if len(m) == 0 {
+ // No match: keep the glob text itself.
+ add(internBytes(input))
+ continue
+ }
+ for _, t := range m {
+ add(intern(t))
+ }
+ }
+}
+
+// parseVar builds the assignAST for a target-specific variable. s is
+// the text left of (and including) the trailing '=' — e.g. "CFLAGS :="
+// — and rhs is the already-evaluated right-hand side. The operator is
+// determined from the byte before '=' (':', '+', '?', or none).
+func (r *rule) parseVar(s []byte, rhs expr) (*assignAST, error) {
+ var lhsBytes []byte
+ var op string
+ // TODO(ukai): support override, export.
+ if s[len(s)-1] != '=' {
+ panic(fmt.Sprintf("unexpected lhs %q", s))
+ }
+ switch s[len(s)-2] { // s[len(s)-1] is '='
+ case ':':
+ lhsBytes = trimSpaceBytes(s[:len(s)-2])
+ op = ":="
+ case '+':
+ lhsBytes = trimSpaceBytes(s[:len(s)-2])
+ op = "+="
+ case '?':
+ lhsBytes = trimSpaceBytes(s[:len(s)-2])
+ op = "?="
+ default:
+ lhsBytes = trimSpaceBytes(s[:len(s)-1])
+ op = "="
+ }
+ assign := &assignAST{
+ lhs: literal(string(lhsBytes)),
+ rhs: compactExpr(rhs),
+ op: op,
+ }
+ assign.srcpos = r.srcpos
+ return assign, nil
+}
+
+// parse parses a rule line into r.
+// line is the rule line until '=', or before ';'.
+// line was already expanded, so probably no need to skip var $(xxx) when
+// finding a literal char. i.e. $ is parsed as literal '$'.
+// assign is not nil if line was known as target specific var '<xxx>: <v>=<val>'.
+// rhs is not nil if line ended with '=' (target specific var after evaluated).
+// It returns the target-specific-variable assignment (if any) and a
+// parse error whose text matches GNU make's diagnostics.
+func (r *rule) parse(line []byte, assign *assignAST, rhs expr) (*assignAST, error) {
+ line = trimLeftSpaceBytes(line)
+ // See semicolon.mk.
+ if rhs == nil && (len(line) == 0 || line[0] == ';') {
+ return nil, nil
+ }
+ r.outputs = []string{}
+
+ index := findLiteralChar(line, ':', 0, noSkipVar)
+ if index < 0 {
+ return nil, errors.New("*** missing separator.")
+ }
+
+ // Targets (or a single '%' target pattern) before the first ':'.
+ first := line[:index]
+ ws := newWordScanner(first)
+ ws.esc = true
+ pat, isFirstPattern := isPatternRule(first)
+ if isFirstPattern {
+ n := 0
+ for ws.Scan() {
+ n++
+ if n > 1 {
+ return nil, errors.New("*** mixed implicit and normal rules: deprecated syntax")
+ }
+ }
+ r.outputPatterns = []pattern{pat}
+ } else {
+ for ws.Scan() {
+ // TODO(ukai): expand raw wildcard for output. any usage?
+ r.outputs = append(r.outputs, internBytes(unescapeTarget(ws.Bytes())))
+ }
+ }
+
+ index++
+ if index < len(line) && line[index] == ':' {
+ r.isDoubleColon = true
+ index++
+ }
+
+ rest := line[index:]
+ if assign != nil {
+ if len(rest) > 0 {
+ panic(fmt.Sprintf("pattern specific var? line:%q", line))
+ }
+ return assign, nil
+ }
+ if rhs != nil {
+ assign, err := r.parseVar(rest, rhs)
+ if err != nil {
+ return nil, err
+ }
+ return assign, nil
+ }
+ // An inline recipe after ';' is split off before parsing inputs.
+ index = bytes.IndexByte(rest, ';')
+ if index >= 0 {
+ r.cmds = append(r.cmds, string(rest[index+1:]))
+ // Was rest[:index-1], which dropped the byte before ';' and
+ // panicked with a slice-bounds error when ';' immediately
+ // followed ':' (e.g. "foo:;cmd", index == 0). Keep everything
+ // up to, but not including, the ';'; any trailing space is
+ // harmless to the word scanner in parseInputs.
+ rest = rest[:index]
+ }
+ index = findLiteralChar(rest, ':', 0, noSkipVar)
+ if index < 0 {
+ r.parseInputs(rest)
+ return nil, nil
+ }
+
+ // Static pattern rule "targets: %.pat: prereqs".
+ // %.x: %.y: %.z
+ if isFirstPattern {
+ return nil, errors.New("*** mixed implicit and normal rules: deprecated syntax")
+ }
+
+ second := rest[:index]
+ third := rest[index+1:]
+
+ // r.outputs is already set.
+ ws = newWordScanner(second)
+ if !ws.Scan() {
+ return nil, errors.New("*** missing target pattern.")
+ }
+ outpat, ok := isPatternRule(ws.Bytes())
+ if !ok {
+ return nil, errors.New("*** target pattern contains no '%'.")
+ }
+ r.outputPatterns = []pattern{outpat}
+ if ws.Scan() {
+ return nil, errors.New("*** multiple target patterns.")
+ }
+ r.parseInputs(third)
+
+ return nil, nil
+}
diff --git a/golang/kati/rule_parser_test.go b/golang/kati/rule_parser_test.go
new file mode 100644
index 0000000..be12041
--- /dev/null
+++ b/golang/kati/rule_parser_test.go
@@ -0,0 +1,223 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "reflect"
+ "testing"
+)
+
+// TestRuleParser is a table-driven test of (*rule).parse. Each case
+// supplies the rule line (in), an optional pre-known target-specific
+// var (tsv), and an optional evaluated right-hand side (rhs); it then
+// checks the parsed rule fields, the returned assignment, or the
+// exact GNU-make-style error string.
+func TestRuleParser(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ tsv *assignAST
+ rhs expr
+ want rule
+ assign *assignAST
+ err string
+ }{
+ {
+ in: "foo: bar",
+ want: rule{
+ outputs: []string{"foo"},
+ inputs: []string{"bar"},
+ },
+ },
+ {
+ in: "foo: bar baz",
+ want: rule{
+ outputs: []string{"foo"},
+ inputs: []string{"bar", "baz"},
+ },
+ },
+ {
+ in: "foo:: bar",
+ want: rule{
+ outputs: []string{"foo"},
+ inputs: []string{"bar"},
+ isDoubleColon: true,
+ },
+ },
+ {
+ in: "foo",
+ err: "*** missing separator.",
+ },
+ {
+ in: "%.o: %.c",
+ want: rule{
+ outputs: []string{},
+ outputPatterns: []pattern{pattern{suffix: ".o"}},
+ inputs: []string{"%.c"},
+ },
+ },
+ {
+ in: "foo %.o: %.c",
+ err: "*** mixed implicit and normal rules: deprecated syntax",
+ },
+ {
+ in: "foo.o: %.o: %.c %.h",
+ want: rule{
+ outputs: []string{"foo.o"},
+ outputPatterns: []pattern{pattern{suffix: ".o"}},
+ inputs: []string{"%.c", "%.h"},
+ },
+ },
+ {
+ in: "%.x: %.y: %.z",
+ err: "*** mixed implicit and normal rules: deprecated syntax",
+ },
+ {
+ in: "foo.o: : %.c",
+ err: "*** missing target pattern.",
+ },
+ {
+ in: "foo.o: %.o %.o: %.c",
+ err: "*** multiple target patterns.",
+ },
+ {
+ in: "foo.o: foo.o: %.c",
+ err: "*** target pattern contains no '%'.",
+ },
+ {
+ in: "foo: bar | baz",
+ want: rule{
+ outputs: []string{"foo"},
+ inputs: []string{"bar"},
+ orderOnlyInputs: []string{"baz"},
+ },
+ },
+ {
+ in: "foo: CFLAGS =",
+ rhs: expr{literal("-g")},
+ want: rule{
+ outputs: []string{"foo"},
+ },
+ assign: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: "=",
+ },
+ },
+ {
+ in: "foo:",
+ tsv: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: "=",
+ },
+ want: rule{
+ outputs: []string{"foo"},
+ },
+ assign: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: "=",
+ },
+ },
+ {
+ in: "foo: CFLAGS=",
+ rhs: expr{literal("-g")},
+ want: rule{
+ outputs: []string{"foo"},
+ },
+ assign: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: "=",
+ },
+ },
+ {
+ in: "foo: CFLAGS :=",
+ rhs: expr{literal("-g")},
+ want: rule{
+ outputs: []string{"foo"},
+ },
+ assign: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: ":=",
+ },
+ },
+ {
+ in: "%.o: CFLAGS :=",
+ rhs: expr{literal("-g")},
+ want: rule{
+ outputs: []string{},
+ outputPatterns: []pattern{pattern{suffix: ".o"}},
+ },
+ assign: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: ":=",
+ },
+ },
+ {
+ in: "%.o:",
+ tsv: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: ":=",
+ },
+ want: rule{
+ outputs: []string{},
+ outputPatterns: []pattern{pattern{suffix: ".o"}},
+ },
+ assign: &assignAST{
+ lhs: literal("CFLAGS"),
+ rhs: literal("-g"),
+ op: ":=",
+ },
+ },
+ /* TODO
+ {
+ in: "foo.o: %.c: %.c",
+ err: "*** target 'foo.o' doesn't match the target pattern",
+ },
+ */
+ } {
+ got := &rule{}
+ assign, err := got.parse([]byte(tc.in), tc.tsv, tc.rhs)
+ if tc.err != "" {
+ if err == nil {
+ t.Errorf(`r.parse(%q, %v)=_, <nil>, want _, %q`, tc.in, tc.rhs, tc.err)
+ continue
+ }
+ if got, want := err.Error(), tc.err; got != want {
+ t.Errorf(`r.parse(%q, %v)=_, %s, want %s`, tc.in, tc.rhs, got, want)
+ }
+ continue
+ }
+ if err != nil {
+ t.Errorf(`r.parse(%q, %v)=_, %v; want nil error`, tc.in, tc.rhs, err)
+ continue
+ }
+ if !reflect.DeepEqual(*got, tc.want) {
+ t.Errorf(`r.parse(%q, %v); r=%#v, want %#v`, tc.in, tc.rhs, *got, tc.want)
+ }
+ if tc.assign != nil {
+ if assign == nil {
+ t.Errorf(`r.parse(%q, %v)=<nil>; want=%#v`, tc.in, tc.rhs, tc.assign)
+ continue
+ }
+ if got, want := assign, tc.assign; !reflect.DeepEqual(got, want) {
+ t.Errorf(`r.parse(%q, %v)=%#v; want=%#v`, tc.in, tc.rhs, got, want)
+ }
+ continue
+ }
+ if assign != nil {
+ t.Errorf(`r.parse(%q, %v)=%v; want=<nil>`, tc.in, tc.rhs, assign)
+ }
+ }
+}
diff --git a/golang/kati/serialize.go b/golang/kati/serialize.go
new file mode 100644
index 0000000..3ccb469
--- /dev/null
+++ b/golang/kati/serialize.go
@@ -0,0 +1,796 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "crypto/sha1"
+ "encoding/binary"
+ "encoding/gob"
+ "encoding/json"
+ "fmt"
+ "io"
+ "io/ioutil"
+ "net/url"
+ "os"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ "github.com/golang/glog"
+)
+
+// One-byte type tags used by the binary var dump format (see the dump
+// methods on each Value implementation and deserializeVar).
+const (
+ valueTypeRecursive = 'R'
+ valueTypeSimple = 'S'
+ valueTypeTSV = 'T'
+ valueTypeUndefined = 'U'
+ valueTypeAssign = 'a'
+ valueTypeExpr = 'e'
+ valueTypeFunc = 'f'
+ valueTypeLiteral = 'l'
+ valueTypeNop = 'n'
+ valueTypeParamref = 'p'
+ valueTypeVarref = 'r'
+ valueTypeVarsubst = 's'
+ valueTypeTmpval = 't'
+)
+
+// JSON is a json loader/saver.
+var JSON LoadSaver
+
+// GOB is a gob loader/saver.
+var GOB LoadSaver
+
+func init() {
+ JSON = jsonLoadSaver{}
+ GOB = gobLoadSaver{}
+}
+
+// Stateless LoadSaver implementations backing JSON and GOB.
+type jsonLoadSaver struct{}
+type gobLoadSaver struct{}
+
+// dumpbuf accumulates a little-endian binary dump in w. The first
+// write error is latched in err and all later writes become no-ops,
+// so callers can chain writes and check err once at the end.
+type dumpbuf struct {
+ w bytes.Buffer
+ err error
+}
+
+// Int appends i as a little-endian int32.
+func (d *dumpbuf) Int(i int) {
+ if d.err != nil {
+ return
+ }
+ v := int32(i)
+ d.err = binary.Write(&d.w, binary.LittleEndian, &v)
+}
+
+// Str appends s as a length-prefixed (int32) byte string.
+func (d *dumpbuf) Str(s string) {
+ if d.err != nil {
+ return
+ }
+ d.Int(len(s))
+ if d.err != nil {
+ return
+ }
+ _, d.err = io.WriteString(&d.w, s)
+}
+
+// Bytes appends b as a length-prefixed (int32) byte string.
+func (d *dumpbuf) Bytes(b []byte) {
+ if d.err != nil {
+ return
+ }
+ d.Int(len(b))
+ if d.err != nil {
+ return
+ }
+ _, d.err = d.w.Write(b)
+}
+
+// Byte appends the single byte b.
+func (d *dumpbuf) Byte(b byte) {
+ if d.err != nil {
+ return
+ }
+ d.err = writeByte(&d.w, b)
+}
+
+// serializableVar is the portable (json/gob) form of a Var/Value tree;
+// Type selects the concrete kind and Children hold nested values.
+type serializableVar struct {
+ Type string
+ V string
+ Origin string
+ Children []serializableVar
+}
+
+// serializableDepNode is a DepNode with every target-name reference
+// replaced by an index into serializableGraph.Targets, and every
+// target-specific var by an index into serializableGraph.Tsvs.
+type serializableDepNode struct {
+ Output int
+ Cmds []string
+ Deps []int
+ OrderOnlys []int
+ Parents []int
+ HasRule bool
+ IsPhony bool
+ ActualInputs []int
+ TargetSpecificVars []int
+ Filename string
+ Lineno int
+}
+
+// serializableTargetSpecificVar is one deduplicated target-specific
+// variable (name plus serialized value).
+type serializableTargetSpecificVar struct {
+ Name string
+ Value serializableVar
+}
+
+// serializableGraph is the on-disk representation of a DepGraph,
+// including the string/tsv pools the node indices point into.
+type serializableGraph struct {
+ Nodes []*serializableDepNode
+ Vars map[string]serializableVar
+ Tsvs []serializableTargetSpecificVar
+ Targets []string
+ Roots []string
+ AccessedMks []*accessedMakefile
+ Exports map[string]bool
+}
+
+// encGob gob-encodes v and returns the encoding as a string (used as
+// a hashable dedup key, not for human consumption).
+func encGob(v interface{}) (string, error) {
+ var buf bytes.Buffer
+ e := gob.NewEncoder(&buf)
+ err := e.Encode(v)
+ if err != nil {
+ return "", err
+ }
+ return buf.String(), nil
+}
+
+// encVar returns the binary dump of variable k=v as a string, used as
+// a dedup key for target-specific vars in the serializer.
+func encVar(k string, v Var) (string, error) {
+ var dump dumpbuf
+ dump.Str(k)
+ v.dump(&dump)
+ return dump.w.String(), dump.err
+}
+
+// depNodesSerializer walks a DepNode forest and builds the pooled,
+// index-based serializable form: targets and tsvs are interned via
+// targetMap/tsvMap, done guards against revisiting shared nodes, and
+// the first error is latched in err.
+type depNodesSerializer struct {
+ nodes []*serializableDepNode
+ tsvs []serializableTargetSpecificVar
+ tsvMap map[string]int
+ targets []string
+ targetMap map[string]int
+ done map[string]bool
+ err error
+}
+
+// newDepNodesSerializer returns a serializer with empty intern tables.
+func newDepNodesSerializer() *depNodesSerializer {
+ return &depNodesSerializer{
+ tsvMap: make(map[string]int),
+ targetMap: make(map[string]int),
+ done: make(map[string]bool),
+ }
+}
+
+// serializeTarget interns target name t and returns its stable index
+// into ns.targets.
+func (ns *depNodesSerializer) serializeTarget(t string) int {
+ id, present := ns.targetMap[t]
+ if present {
+ return id
+ }
+ id = len(ns.targets)
+ ns.targetMap[t] = id
+ ns.targets = append(ns.targets, t)
+ return id
+}
+
+// serializeDepNodes converts nodes (and, recursively, their Deps and
+// OrderOnlys) into serializableDepNodes, interning target names and
+// deduplicating target-specific vars by their binary dump. Nodes
+// already in ns.done are skipped, so shared subgraphs are emitted
+// once. On error ns.err is set and the walk stops.
+func (ns *depNodesSerializer) serializeDepNodes(nodes []*DepNode) {
+ if ns.err != nil {
+ return
+ }
+ for _, n := range nodes {
+ if ns.done[n.Output] {
+ continue
+ }
+ ns.done[n.Output] = true
+
+ var deps []int
+ for _, d := range n.Deps {
+ deps = append(deps, ns.serializeTarget(d.Output))
+ }
+ var orderonlys []int
+ for _, d := range n.OrderOnlys {
+ orderonlys = append(orderonlys, ns.serializeTarget(d.Output))
+ }
+ var parents []int
+ for _, d := range n.Parents {
+ parents = append(parents, ns.serializeTarget(d.Output))
+ }
+ var actualInputs []int
+ for _, i := range n.ActualInputs {
+ actualInputs = append(actualInputs, ns.serializeTarget(i))
+ }
+
+ // Sort keys for consistent serialization.
+ var tsvKeys []string
+ for k := range n.TargetSpecificVars {
+ tsvKeys = append(tsvKeys, k)
+ }
+ sort.Strings(tsvKeys)
+
+ var vars []int
+ for _, k := range tsvKeys {
+ v := n.TargetSpecificVars[k]
+ sv := serializableTargetSpecificVar{Name: k, Value: v.serialize()}
+ // The binary dump of k=v is the dedup key for the tsv pool.
+ //gob := encGob(sv)
+ gob, err := encVar(k, v)
+ if err != nil {
+ ns.err = err
+ return
+ }
+ id, present := ns.tsvMap[gob]
+ if !present {
+ id = len(ns.tsvs)
+ ns.tsvMap[gob] = id
+ ns.tsvs = append(ns.tsvs, sv)
+ }
+ vars = append(vars, id)
+ }
+
+ ns.nodes = append(ns.nodes, &serializableDepNode{
+ Output: ns.serializeTarget(n.Output),
+ Cmds: n.Cmds,
+ Deps: deps,
+ OrderOnlys: orderonlys,
+ Parents: parents,
+ HasRule: n.HasRule,
+ IsPhony: n.IsPhony,
+ ActualInputs: actualInputs,
+ TargetSpecificVars: vars,
+ Filename: n.Filename,
+ Lineno: n.Lineno,
+ })
+ // Recurse into both dependency kinds; done-marking above keeps
+ // this from re-emitting shared nodes.
+ ns.serializeDepNodes(n.Deps)
+ if ns.err != nil {
+ return
+ }
+ ns.serializeDepNodes(n.OrderOnlys)
+ if ns.err != nil {
+ return
+ }
+ }
+}
+
+// makeSerializableVars converts every Var in vars to its serializable
+// form, keyed by variable name.
+func makeSerializableVars(vars Vars) (r map[string]serializableVar) {
+ r = make(map[string]serializableVar)
+ for k, v := range vars {
+ r[k] = v.serialize()
+ }
+ return r
+}
+
+// makeSerializableGraph flattens g (nodes, vars, accessed makefiles,
+// exports) plus the requested roots into a serializableGraph. Any
+// error from node serialization is returned alongside the partial
+// result.
+func makeSerializableGraph(g *DepGraph, roots []string) (serializableGraph, error) {
+ ns := newDepNodesSerializer()
+ ns.serializeDepNodes(g.nodes)
+ v := makeSerializableVars(g.vars)
+ return serializableGraph{
+ Nodes: ns.nodes,
+ Vars: v,
+ Tsvs: ns.tsvs,
+ Targets: ns.targets,
+ Roots: roots,
+ AccessedMks: g.accessedMks,
+ Exports: g.exports,
+ }, ns.err
+}
+
+// Save writes g as indented JSON to filename. roots are the requested
+// build targets, recorded in the serialized graph. Timing is logged
+// via logStats.
+func (jsonLoadSaver) Save(g *DepGraph, filename string, roots []string) error {
+ startTime := time.Now()
+ sg, err := makeSerializableGraph(g, roots)
+ if err != nil {
+ return err
+ }
+ o, err := json.MarshalIndent(sg, " ", " ")
+ if err != nil {
+ return err
+ }
+ f, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ _, err = f.Write(o)
+ if err != nil {
+ // Close before returning so the handle isn't leaked; the
+ // write error takes precedence over any close error.
+ f.Close()
+ return err
+ }
+ err = f.Close()
+ if err != nil {
+ return err
+ }
+ logStats("json serialize time: %q", time.Since(startTime))
+ return nil
+}
+
+func (gobLoadSaver) Save(g *DepGraph, filename string, roots []string) error {
+ startTime := time.Now()
+ f, err := os.Create(filename)
+ if err != nil {
+ return err
+ }
+ e := gob.NewEncoder(f)
+ var sg serializableGraph
+ {
+ startTime := time.Now()
+ sg, err = makeSerializableGraph(g, roots)
+ if err != nil {
+ return err
+ }
+ logStats("gob serialize prepare time: %q", time.Since(startTime))
+ }
+ {
+ startTime := time.Now()
+ err = e.Encode(sg)
+ if err != nil {
+ return err
+ }
+ logStats("gob serialize output time: %q", time.Since(startTime))
+ }
+ err = f.Close()
+ if err != nil {
+ return err
+ }
+ logStats("gob serialize time: %q", time.Since(startTime))
+ return nil
+}
+
// cacheFilename derives the on-disk cache file name for makefile mk
// and the goal targets it was built with, URL-escaped so the result is
// a single safe path component.
func cacheFilename(mk string, roots []string) string {
	name := make([]byte, 0, len(mk)+16)
	name = append(name, ".kati_cache."...)
	name = append(name, mk...)
	for _, root := range roots {
		name = append(name, '.')
		name = append(name, root...)
	}
	return url.QueryEscape(string(name))
}
+
// saveCache writes g to the kati cache file derived from the first
// accessed makefile, unless any accessed makefile was observed in an
// inconsistent state (changed while being read), in which case any
// stale cache file is removed instead and no error is reported.
func saveCache(g *DepGraph, roots []string) error {
	if len(g.accessedMks) == 0 {
		return fmt.Errorf("no Makefile is read")
	}
	// accessedMks[0].Filename names the cache file.
	cacheFile := cacheFilename(g.accessedMks[0].Filename, roots)
	for _, mk := range g.accessedMks {
		// Inconsistent, do not dump this result.
		if mk.State == fileInconsistent {
			if exists(cacheFile) {
				os.Remove(cacheFile)
			}
			return nil
		}
	}
	return GOB.Save(g, cacheFile, roots)
}
+
// deserializeSingleChild deserializes sv's sole child; it is an error
// for sv to have any other number of children.
func deserializeSingleChild(sv serializableVar) (Value, error) {
	if len(sv.Children) != 1 {
		return nil, fmt.Errorf("unexpected number of children: %q", sv)
	}
	return deserializeVar(sv.Children[0])
}
+
// deserializeVar reconstructs a Value from its serialized form,
// dispatching on sv.Type. It is the inverse of the serialize methods
// on the concrete Value implementations; an unknown type is an error.
func deserializeVar(sv serializableVar) (r Value, err error) {
	switch sv.Type {
	case "literal":
		return literal(sv.V), nil
	case "tmpval":
		return tmpval([]byte(sv.V)), nil
	case "expr":
		// Concatenation of the child values.
		var e expr
		for _, v := range sv.Children {
			dv, err := deserializeVar(v)
			if err != nil {
				return nil, err
			}
			e = append(e, dv)
		}
		return e, nil
	case "varref":
		// Variable reference; sv.V[0] records the paren style used.
		dv, err := deserializeSingleChild(sv)
		if err != nil {
			return nil, err
		}
		return &varref{varname: dv, paren: sv.V[0]}, nil
	case "paramref":
		// Positional parameter reference ($1, $2, ...).
		v, err := strconv.Atoi(sv.V)
		if err != nil {
			return nil, err
		}
		return paramref(v), nil
	case "varsubst":
		// Substitution reference; children are varname, pat, subst.
		varname, err := deserializeVar(sv.Children[0])
		if err != nil {
			return nil, err
		}
		pat, err := deserializeVar(sv.Children[1])
		if err != nil {
			return nil, err
		}
		subst, err := deserializeVar(sv.Children[2])
		if err != nil {
			return nil, err
		}
		return varsubst{
			varname: varname,
			pat:     pat,
			subst:   subst,
			paren:   sv.V[0],
		}, nil

	case "func":
		// First child is the function-name literal (stored with a
		// leading paren byte, hence name[1:] for the map lookup); the
		// remaining children are the arguments.
		dv, err := deserializeVar(sv.Children[0])
		if err != nil {
			return nil, err
		}
		name, ok := dv.(literal)
		if !ok {
			return nil, fmt.Errorf("func name is not literal %s: %T", dv, dv)
		}
		f := funcMap[string(name[1:])]()
		f.AddArg(name)
		for _, a := range sv.Children[1:] {
			dv, err := deserializeVar(a)
			if err != nil {
				return nil, err
			}
			f.AddArg(dv)
		}
		return f, nil
	case "funcEvalAssign":
		// Children are lhs, op, rhs of the assignment.
		rhs, err := deserializeVar(sv.Children[2])
		if err != nil {
			return nil, err
		}
		return &funcEvalAssign{
			lhs: sv.Children[0].V,
			op:  sv.Children[1].V,
			rhs: rhs,
		}, nil
	case "funcNop":
		return &funcNop{expr: sv.V}, nil

	case "simple":
		// Simply-expanded variable; the value was stored space-joined.
		return &simpleVar{
			value:  strings.Split(sv.V, " "),
			origin: sv.Origin,
		}, nil
	case "recursive":
		// Recursively-expanded variable with an unexpanded body.
		expr, err := deserializeSingleChild(sv)
		if err != nil {
			return nil, err
		}
		return &recursiveVar{
			expr:   expr,
			origin: sv.Origin,
		}, nil

	case ":=", "=", "+=", "?=":
		// Target specific variable; sv.Type records its assignment op.
		dv, err := deserializeSingleChild(sv)
		if err != nil {
			return nil, err
		}
		v, ok := dv.(Var)
		if !ok {
			return nil, fmt.Errorf("not var: target specific var %s %T", dv, dv)
		}
		return &targetSpecificVar{
			v:  v,
			op: sv.Type,
		}, nil

	default:
		return nil, fmt.Errorf("unknown serialized variable type: %q", sv)
	}
}
+
+func deserializeVars(vars map[string]serializableVar) (Vars, error) {
+ r := make(Vars)
+ for k, v := range vars {
+ dv, err := deserializeVar(v)
+ if err != nil {
+ return nil, err
+ }
+ vv, ok := dv.(Var)
+ if !ok {
+ return nil, fmt.Errorf("not var: %s: %T", dv, dv)
+ }
+ r[k] = vv
+ }
+ return r, nil
+}
+
// deserializeNodes rebuilds the DepNode forest from the flat
// serialized graph: target names are referenced by integer index into
// g.Targets and target specific vars by index into g.Tsvs.
func deserializeNodes(g serializableGraph) (r []*DepNode, err error) {
	nodes := g.Nodes
	tsvs := g.Tsvs
	targets := g.Targets
	// Deserialize all TSVs first so that multiple rules can share memory.
	var tsvValues []Var
	for _, sv := range tsvs {
		dv, err := deserializeVar(sv.Value)
		if err != nil {
			return nil, err
		}
		vv, ok := dv.(Var)
		if !ok {
			return nil, fmt.Errorf("not var: %s %T", dv, dv)
		}
		tsvValues = append(tsvValues, vv)
	}

	// First pass: create every node, keyed by its output target name.
	nodeMap := make(map[string]*DepNode)
	for _, n := range nodes {
		var actualInputs []string
		for _, i := range n.ActualInputs {
			actualInputs = append(actualInputs, targets[i])
		}

		d := &DepNode{
			Output:             targets[n.Output],
			Cmds:               n.Cmds,
			HasRule:            n.HasRule,
			IsPhony:            n.IsPhony,
			ActualInputs:       actualInputs,
			Filename:           n.Filename,
			Lineno:             n.Lineno,
			TargetSpecificVars: make(Vars),
		}

		for _, id := range n.TargetSpecificVars {
			sv := tsvs[id]
			d.TargetSpecificVars[sv.Name] = tsvValues[id]
		}

		nodeMap[targets[n.Output]] = d
		r = append(r, d)
	}

	// Second pass: link Deps/OrderOnlys/Parents edges by target index,
	// now that every node exists.
	for _, n := range nodes {
		d := nodeMap[targets[n.Output]]
		for _, o := range n.Deps {
			c, present := nodeMap[targets[o]]
			if !present {
				return nil, fmt.Errorf("unknown target: %d (%s)", o, targets[o])
			}
			d.Deps = append(d.Deps, c)
		}
		for _, o := range n.OrderOnlys {
			c, present := nodeMap[targets[o]]
			if !present {
				return nil, fmt.Errorf("unknown target: %d (%s)", o, targets[o])
			}
			d.OrderOnlys = append(d.OrderOnlys, c)
		}
		for _, o := range n.Parents {
			c, present := nodeMap[targets[o]]
			if !present {
				return nil, fmt.Errorf("unknown target: %d (%s)", o, targets[o])
			}
			d.Parents = append(d.Parents, c)
		}
	}

	return r, nil
}
+
// human renders a byte count in a compact human-readable form using
// decimal units, switching to a larger unit only at ten times its size.
func human(n int) string {
	switch {
	case n >= 10*1000*1000*1000:
		return fmt.Sprintf("%.2fGB", float32(n)/1000/1000/1000)
	case n >= 10*1000*1000:
		return fmt.Sprintf("%.2fMB", float32(n)/1000/1000)
	case n >= 10*1000:
		return fmt.Sprintf("%.2fkB", float32(n)/1000)
	default:
		return fmt.Sprintf("%dB", n)
	}
}
+
+func showSerializedNodesStats(nodes []*serializableDepNode) {
+ outputSize := 0
+ cmdSize := 0
+ depsSize := 0
+ orderOnlysSize := 0
+ actualInputSize := 0
+ tsvSize := 0
+ filenameSize := 0
+ linenoSize := 0
+ for _, n := range nodes {
+ outputSize += 4
+ for _, c := range n.Cmds {
+ cmdSize += len(c)
+ }
+ depsSize += 4 * len(n.Deps)
+ orderOnlysSize += 4 * len(n.OrderOnlys)
+ actualInputSize += 4 * len(n.ActualInputs)
+ tsvSize += 4 * len(n.TargetSpecificVars)
+ filenameSize += len(n.Filename)
+ linenoSize += 4
+ }
+ size := outputSize + cmdSize + depsSize + orderOnlysSize + actualInputSize + tsvSize + filenameSize + linenoSize
+ logStats("%d nodes %s", len(nodes), human(size))
+ logStats(" output %s", human(outputSize))
+ logStats(" command %s", human(cmdSize))
+ logStats(" deps %s", human(depsSize))
+ logStats(" orderonlys %s", human(orderOnlysSize))
+ logStats(" inputs %s", human(actualInputSize))
+ logStats(" tsv %s", human(tsvSize))
+ logStats(" filename %s", human(filenameSize))
+ logStats(" lineno %s", human(linenoSize))
+}
+
+func (v serializableVar) size() int {
+ size := 0
+ size += len(v.Type)
+ size += len(v.V)
+ size += len(v.Origin)
+ for _, c := range v.Children {
+ size += c.size()
+ }
+ return size
+}
+
+func showSerializedVarsStats(vars map[string]serializableVar) {
+ nameSize := 0
+ valueSize := 0
+ for k, v := range vars {
+ nameSize += len(k)
+ valueSize += v.size()
+ }
+ size := nameSize + valueSize
+ logStats("%d vars %s", len(vars), human(size))
+ logStats(" name %s", human(nameSize))
+ logStats(" value %s", human(valueSize))
+}
+
+func showSerializedTsvsStats(vars []serializableTargetSpecificVar) {
+ nameSize := 0
+ valueSize := 0
+ for _, v := range vars {
+ nameSize += len(v.Name)
+ valueSize += v.Value.size()
+ }
+ size := nameSize + valueSize
+ logStats("%d tsvs %s", len(vars), human(size))
+ logStats(" name %s", human(nameSize))
+ logStats(" value %s", human(valueSize))
+}
+
+func showSerializedTargetsStats(targets []string) {
+ size := 0
+ for _, t := range targets {
+ size += len(t)
+ }
+ logStats("%d targets %s", len(targets), human(size))
+}
+
+func showSerializedAccessedMksStats(accessedMks []*accessedMakefile) {
+ size := 0
+ for _, rm := range accessedMks {
+ size += len(rm.Filename) + len(rm.Hash) + 4
+ }
+ logStats("%d makefiles %s", len(accessedMks), human(size))
+}
+
// showSerializedGraphStats logs size statistics for every section of a
// serialized graph: nodes, vars, target specific vars, target names,
// and accessed makefiles.
func showSerializedGraphStats(g serializableGraph) {
	showSerializedNodesStats(g.Nodes)
	showSerializedVarsStats(g.Vars)
	showSerializedTsvsStats(g.Tsvs)
	showSerializedTargetsStats(g.Targets)
	showSerializedAccessedMksStats(g.AccessedMks)
}
+
// deserializeGraph rebuilds a DepGraph from its flat serialized form,
// optionally logging per-section size stats when StatsFlag is set.
func deserializeGraph(g serializableGraph) (*DepGraph, error) {
	if StatsFlag {
		showSerializedGraphStats(g)
	}
	nodes, err := deserializeNodes(g)
	if err != nil {
		return nil, err
	}
	vars, err := deserializeVars(g.Vars)
	if err != nil {
		return nil, err
	}
	return &DepGraph{
		nodes:       nodes,
		vars:        vars,
		accessedMks: g.AccessedMks,
		exports:     g.Exports,
	}, nil
}
+
+func (jsonLoadSaver) Load(filename string) (*DepGraph, error) {
+ startTime := time.Now()
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ d := json.NewDecoder(f)
+ g := serializableGraph{Vars: make(map[string]serializableVar)}
+ err = d.Decode(&g)
+ if err != nil {
+ return nil, err
+ }
+ dg, err := deserializeGraph(g)
+ if err != nil {
+ return nil, err
+ }
+ logStats("gob deserialize time: %q", time.Since(startTime))
+ return dg, nil
+}
+
+func (gobLoadSaver) Load(filename string) (*DepGraph, error) {
+ startTime := time.Now()
+ f, err := os.Open(filename)
+ if err != nil {
+ return nil, err
+ }
+ defer f.Close()
+
+ d := gob.NewDecoder(f)
+ g := serializableGraph{Vars: make(map[string]serializableVar)}
+ err = d.Decode(&g)
+ if err != nil {
+ return nil, err
+ }
+ dg, err := deserializeGraph(g)
+ if err != nil {
+ return nil, err
+ }
+ logStats("json deserialize time: %q", time.Since(startTime))
+ return dg, nil
+}
+
+func loadCache(makefile string, roots []string) (*DepGraph, error) {
+ startTime := time.Now()
+ defer func() {
+ logStats("Cache lookup time: %q", time.Since(startTime))
+ }()
+
+ filename := cacheFilename(makefile, roots)
+ if !exists(filename) {
+ glog.Warningf("Cache not found %q", filename)
+ return nil, fmt.Errorf("cache not found: %s", filename)
+ }
+
+ g, err := GOB.Load(filename)
+ if err != nil {
+ glog.Warning("Cache load error %q: %v", filename, err)
+ return nil, err
+ }
+ for _, mk := range g.accessedMks {
+ if mk.State != fileExists && mk.State != fileNotExists {
+ return nil, fmt.Errorf("internal error: broken state: %d", mk.State)
+ }
+ if mk.State == fileNotExists {
+ if exists(mk.Filename) {
+ glog.Infof("Cache expired: %s", mk.Filename)
+ return nil, fmt.Errorf("cache expired: %s", mk.Filename)
+ }
+ } else {
+ c, err := ioutil.ReadFile(mk.Filename)
+ if err != nil {
+ glog.Infof("Cache expired: %s", mk.Filename)
+ return nil, fmt.Errorf("cache expired: %s", mk.Filename)
+ }
+ h := sha1.Sum(c)
+ if !bytes.Equal(h[:], mk.Hash[:]) {
+ glog.Infof("Cache expired: %s", mk.Filename)
+ return nil, fmt.Errorf("cache expired: %s", mk.Filename)
+ }
+ }
+ }
+ glog.Info("Cache found in %q", filename)
+ return g, nil
+}
diff --git a/golang/kati/shellutil.go b/golang/kati/shellutil.go
new file mode 100644
index 0000000..b7a16ac
--- /dev/null
+++ b/golang/kati/shellutil.go
@@ -0,0 +1,232 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "strings"
+ "time"
+)
+
// shBuiltins lists $(shell ...) command patterns that kati can
// evaluate in-process without spawning a real shell. Each entry pairs
// a pattern to match against the parsed command with a compact
// constructor that builds the specialized replacement Value.
var shBuiltins = []struct {
	name    string
	pattern expr
	compact func(*funcShell, []Value) Value
}{
	{
		name: "android:rot13",
		// in repo/android/build/core/definitions.mk
		// echo $(1) | tr 'a-zA-Z' 'n-za-mN-ZA-M'
		pattern: expr{
			literal("echo "),
			matchVarref{},
			literal(" | tr 'a-zA-Z' 'n-za-mN-ZA-M'"),
		},
		compact: func(sh *funcShell, matches []Value) Value {
			return &funcShellAndroidRot13{
				funcShell: sh,
				v:         matches[0],
			}
		},
	},
	{
		name: "shell-date",
		pattern: expr{
			mustLiteralRE(`date \+(\S+)`),
		},
		compact: compactShellDate,
	},
	{
		name: "shell-date-quoted",
		pattern: expr{
			mustLiteralRE(`date "\+([^"]+)"`),
		},
		compact: compactShellDate,
	},
}
+
// funcShellAndroidRot13 is a compacted form of the Android
// $(shell echo ... | tr 'a-zA-Z' 'n-za-mN-ZA-M') rot13 pipeline,
// evaluated in-process; v is the echo argument.
type funcShellAndroidRot13 struct {
	*funcShell
	v Value
}
+
// rot13 applies the tr 'a-zA-Z' 'n-za-mN-ZA-M' rotation to buf in
// place; non-letter bytes are left untouched.
func rot13(buf []byte) {
	rotate := func(b, base byte) byte {
		return base + (b-base+13)%26
	}
	for i, b := range buf {
		switch {
		case b >= 'a' && b <= 'z':
			buf[i] = rotate(b, 'a')
		case b >= 'A' && b <= 'Z':
			buf[i] = rotate(b, 'A')
		}
	}
}
+
+func (f *funcShellAndroidRot13) Eval(w evalWriter, ev *Evaluator) error {
+ abuf := newEbuf()
+ fargs, err := ev.args(abuf, f.v)
+ if err != nil {
+ return err
+ }
+ rot13(fargs[0])
+ w.Write(fargs[0])
+ abuf.release()
+ return nil
+}
+
var (
	// ShellDateTimestamp is a timestamp used for $(shell date).
	ShellDateTimestamp time.Time
	// shellDateFormatRef maps strftime(1) conversion specifiers to the
	// equivalent fragments of Go's reference time (2006-01-02 15:04:05).
	shellDateFormatRef = map[string]string{
		"%Y": "2006",
		"%m": "01",
		"%d": "02",
		"%H": "15",
		"%M": "04",
		"%S": "05",
		"%b": "Jan",
		"%k": "15", // XXX %k is space-padded in strftime; "15" is zero-padded
	}
)
+
// funcShellDate is a compacted $(shell date +FORMAT) invocation that
// formats ShellDateTimestamp with a pre-translated Go layout string.
type funcShellDate struct {
	*funcShell
	format string
}
+
+func compactShellDate(sh *funcShell, v []Value) Value {
+ if ShellDateTimestamp.IsZero() {
+ return sh
+ }
+ tf, ok := v[0].(literal)
+ if !ok {
+ return sh
+ }
+ tfstr := string(tf)
+ for k, v := range shellDateFormatRef {
+ tfstr = strings.Replace(tfstr, k, v, -1)
+ }
+ return &funcShellDate{
+ funcShell: sh,
+ format: tfstr,
+ }
+}
+
// Eval writes ShellDateTimestamp rendered with the precomputed layout.
func (f *funcShellDate) Eval(w evalWriter, ev *Evaluator) error {
	fmt.Fprint(w, ShellDateTimestamp.Format(f.format))
	return nil
}
+
// buildinCommand (sic) is a parsed shell command that the find
// emulator can run in-process, writing its output to w.
type buildinCommand interface {
	run(w evalWriter)
}

// errFindEmulatorDisabled is returned when find emulation is off.
var errFindEmulatorDisabled = errors.New("builtin: find emulator disabled")
+
+func parseBuiltinCommand(cmd string) (buildinCommand, error) {
+ if !UseFindEmulator {
+ return nil, errFindEmulatorDisabled
+ }
+ if strings.HasPrefix(trimLeftSpace(cmd), "build/tools/findleaves") {
+ return parseFindleavesCommand(cmd)
+ }
+ return parseFindCommand(cmd)
+}
+
// shellParser is a minimal tokenizer for the shell command lines fed
// to the find emulator.
type shellParser struct {
	cmd        string
	ungetToken string // single token of pushback; see unget
}
+
// token returns the next shell token: ";", "&&", or a word (a fully
// quoted word has its surrounding quotes stripped). It returns io.EOF
// at end of input, errFindBackground for a lone "&", and
// errFindUnbalancedQuote for an unterminated quoted word.
func (p *shellParser) token() (string, error) {
	// Serve pushback from unget first.
	if p.ungetToken != "" {
		tok := p.ungetToken
		p.ungetToken = ""
		return tok, nil
	}
	p.cmd = trimLeftSpace(p.cmd)
	if len(p.cmd) == 0 {
		return "", io.EOF
	}
	if p.cmd[0] == ';' {
		tok := p.cmd[0:1]
		p.cmd = p.cmd[1:]
		return tok, nil
	}
	if p.cmd[0] == '&' {
		// Only "&&" is supported; a single "&" would background the
		// command, which the emulator cannot do.
		if len(p.cmd) == 1 || p.cmd[1] != '&' {
			return "", errFindBackground
		}
		tok := p.cmd[0:2]
		p.cmd = p.cmd[2:]
		return tok, nil
	}
	// TODO(ukai): redirect token.
	// Scan a word: everything up to whitespace, ';', or '&'.
	i := 0
	for i < len(p.cmd) {
		if isWhitespace(rune(p.cmd[i])) || p.cmd[i] == ';' || p.cmd[i] == '&' {
			break
		}
		i++
	}
	tok := p.cmd[0:i]
	p.cmd = p.cmd[i:]
	c := tok[0]
	if c == '\'' || c == '"' {
		// The word must end with the same quote character it started with.
		if len(tok) < 2 || tok[len(tok)-1] != c {
			return "", errFindUnbalancedQuote
		}
		// todo: unquote?
		tok = tok[1 : len(tok)-1]
	}
	return tok, nil
}
+
+func (p *shellParser) unget(s string) {
+ if s != "" {
+ p.ungetToken = s
+ }
+}
+
+func (p *shellParser) expect(toks ...string) error {
+ tok, err := p.token()
+ if err != nil {
+ return err
+ }
+ for _, t := range toks {
+ if tok == t {
+ return nil
+ }
+ }
+ return fmt.Errorf("shell: token=%q; want=%q", tok, toks)
+}
+
+func (p *shellParser) expectSeq(toks ...string) error {
+ for _, tok := range toks {
+ err := p.expect(tok)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
diff --git a/golang/kati/shellutil_test.go b/golang/kati/shellutil_test.go
new file mode 100644
index 0000000..39c2c64
--- /dev/null
+++ b/golang/kati/shellutil_test.go
@@ -0,0 +1,102 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "testing"
+ "time"
+)
+
+func TestRot13(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ want string
+ }{
+ {
+ in: "PRODUCT_PACKAGE_OVERLAYS",
+ want: "CEBQHPG_CNPXNTR_BIREYNLF",
+ },
+ {
+ in: "product_name",
+ want: "cebqhpg_anzr",
+ },
+ } {
+ buf := []byte(tc.in)
+ rot13(buf)
+ if got, want := string(buf), tc.want; got != want {
+ t.Errorf("rot13(%q) got=%q; want=%q", tc.in, got, want)
+ }
+ }
+}
+
// TestShellDate checks that $(shell date +FMT) invocations are
// compacted into funcShellDate values whose strftime format has been
// translated to a Go reference-time layout.
func TestShellDate(t *testing.T) {
	// Pin ShellDateTimestamp for the test; compaction only happens
	// when it is non-zero. Restore the old value on exit.
	ts := ShellDateTimestamp
	ShellDateTimestamp = time.Now()
	defer func() {
		ShellDateTimestamp = ts
	}()
	for _, tc := range []struct {
		sharg  literal
		format string
	}{
		{
			sharg:  literal("date +%Y-%m-%d"),
			format: "2006-01-02",
		},
		{
			sharg:  literal("date +%Y%m%d.%H%M%S"),
			format: "20060102.150405",
		},
		{
			sharg:  literal(`date "+%d %b %Y %k:%M"`),
			format: "02 Jan 2006 15:04",
		},
	} {
		// Each case must be matched and compacted by one of the two
		// shell-date builtins (unquoted or quoted form).
		var matched bool
		for _, b := range shBuiltins {
			if b.name != "shell-date" && b.name != "shell-date-quoted" {
				continue
			}
			m, ok := matchExpr(expr{tc.sharg}, b.pattern)
			if !ok {
				t.Logf("%s not match with %s", b.name, tc.sharg)
				continue
			}
			f := &funcShell{
				fclosure: fclosure{
					args: []Value{
						literal("(shell"),
						tc.sharg,
					},
				},
			}
			v := b.compact(f, m)
			sd, ok := v.(*funcShellDate)
			if !ok {
				t.Errorf("%s: matched %s but not compacted", tc.sharg, b.name)
				continue
			}
			if got, want := sd.format, tc.format; got != want {
				t.Errorf("%s: format=%q, want=%q - %s", tc.sharg, got, want, b.name)
				continue
			}
			matched = true
			break
		}
		if !matched {
			t.Errorf("%s: not matched", tc.sharg)
		}
	}
}
diff --git a/golang/kati/stats.go b/golang/kati/stats.go
new file mode 100644
index 0000000..a8ea461
--- /dev/null
+++ b/golang/kati/stats.go
@@ -0,0 +1,200 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "sort"
+ "sync"
+ "time"
+)
+
// traceEventT writes JSON trace records describing evaluation events.
// The zero value is disabled until start is called.
type traceEventT struct {
	mu  sync.Mutex
	f   io.WriteCloser
	t0  time.Time // trace start; record timestamps are offsets from it
	pid int       // cached os.Getpid(), set lazily by the first emit
}

const (
	// traceEventMain is the tid used for events on the main goroutine.
	traceEventMain = iota + 1
	// add new ones to use new goroutine.
)

var traceEvent traceEventT
+
// TraceEventStart starts trace event collection, writing records to f.
func TraceEventStart(f io.WriteCloser) {
	traceEvent.start(f)
}

// TraceEventStop stops trace event collection and closes the writer
// passed to TraceEventStart.
func TraceEventStop() {
	traceEvent.stop()
}
+
// start begins emitting trace records to f, opening the JSON array.
func (t *traceEventT) start(f io.WriteCloser) {
	t.f = f
	t.t0 = time.Now()
	fmt.Fprint(t.f, "[ ")
}
+
// enabled reports whether tracing has been started.
func (t *traceEventT) enabled() bool {
	return t.f != nil
}
+
+func (t *traceEventT) stop() {
+ fmt.Fprint(t.f, "\n]\n")
+ t.f.Close()
+}
+
// event is one in-flight evaluation event, created by begin and
// finished by end.
type event struct {
	name, v string
	tid     int
	t       time.Time // begin time
	emit    bool      // whether a "B" record was written (end emits the matching "E")
}
+
// begin records the start of a named evaluation event on goroutine
// tid. The name/value strings are materialized only when tracing or
// eval stats are enabled; a "B" trace record is emitted only for
// "include" and "shell" events.
func (t *traceEventT) begin(name string, v Value, tid int) event {
	var e event
	e.tid = tid
	e.t = time.Now()
	if t.f != nil || EvalStatsFlag {
		e.name = name
		e.v = v.String()
	}
	if t.f != nil {
		e.emit = name == "include" || name == "shell"
		if e.emit {
			t.emit("B", e, e.t.Sub(t.t0))
		}
	}
	return e
}
+
// emit appends one JSON trace record. ph is the phase ("B" begin /
// "E" end) and ts the offset from trace start, written in
// microseconds. The first record lazily initializes pid; subsequent
// records are preceded by a comma so the stream forms a valid JSON
// array. Note that the event name is written into the "cat" field and
// its value into "name".
func (t *traceEventT) emit(ph string, e event, ts time.Duration) {
	t.mu.Lock()
	defer t.mu.Unlock()

	if t.pid == 0 {
		t.pid = os.Getpid()
	} else {
		fmt.Fprintf(t.f, ",\n")
	}
	fmt.Fprintf(t.f, `{"pid":%d,"tid":%d,"ts":%d,"ph":%q,"cat":%q,"name":%q,"args":{}}`,
		t.pid,
		e.tid,
		ts.Nanoseconds()/1e3,
		ph,
		e.name,
		e.v,
	)
}
+
+func (t *traceEventT) end(e event) {
+ if t.f != nil {
+ if e.emit {
+ t.emit("E", e, time.Since(t.t0))
+ }
+ }
+ stats.add(e.name, e.v, e.t)
+}
+
// statsData aggregates timings for one "name:value" evaluation key.
type statsData struct {
	Name    string
	Count   int
	Longest time.Duration
	Total   time.Duration
}

// statsT collects per-key evaluation timings; data is guarded by mu.
type statsT struct {
	mu   sync.Mutex
	data map[string]statsData
}

var stats = &statsT{
	data: make(map[string]statsData),
}
+
+func (s *statsT) add(name, v string, t time.Time) {
+ if !EvalStatsFlag {
+ return
+ }
+ d := time.Since(t)
+ key := fmt.Sprintf("%s:%s", name, v)
+ s.mu.Lock()
+ sd := s.data[key]
+ if d > sd.Longest {
+ sd.Longest = d
+ }
+ sd.Total += d
+ sd.Count++
+ s.data[key] = sd
+ s.mu.Unlock()
+}
+
+// DumpStats dumps statistics collected if EvalStatsFlag is set.
+func DumpStats() {
+ if !EvalStatsFlag {
+ return
+ }
+ var sv byTotalTime
+ for k, v := range stats.data {
+ v.Name = k
+ sv = append(sv, v)
+ }
+ sort.Sort(sv)
+ fmt.Println("count,longest(ns),total(ns),longest,total,name")
+ for _, s := range sv {
+ fmt.Printf("%d,%d,%d,%v,%v,%s\n", s.Count, s.Longest, s.Total, s.Longest, s.Total, s.Name)
+ }
+}
+
// byTotalTime sorts statsData entries by descending total time.
type byTotalTime []statsData

func (b byTotalTime) Len() int      { return len(b) }
func (b byTotalTime) Swap(i, j int) { b[i], b[j] = b[j], b[i] }
func (b byTotalTime) Less(i, j int) bool {
	return b[i].Total > b[j].Total
}
+
// shellStatsT tracks the number of and total time spent in
// $(shell ...) invocations; fields are guarded by mu.
type shellStatsT struct {
	mu       sync.Mutex
	duration time.Duration
	count    int
}

var shellStats = &shellStatsT{}
+
+func (s *shellStatsT) add(d time.Duration) {
+ s.mu.Lock()
+ s.duration += d
+ s.count++
+ s.mu.Unlock()
+}
+
+func (s *shellStatsT) Duration() time.Duration {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ return s.duration
+}
+
+func (s *shellStatsT) Count() int {
+ s.mu.Lock()
+ defer s.mu.Unlock()
+ return s.count
+}
diff --git a/golang/kati/strutil.go b/golang/kati/strutil.go
new file mode 100644
index 0000000..39d282b
--- /dev/null
+++ b/golang/kati/strutil.go
@@ -0,0 +1,430 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "path/filepath"
+ "strings"
+
+ "github.com/golang/glog"
+)
+
// wsbytes marks the byte values make treats as whitespace.
var wsbytes = [256]bool{' ': true, '\t': true, '\n': true, '\r': true}

// TODO(ukai): use unicode.IsSpace?
// isWhitespace reports whether ch is space, tab, newline, or carriage
// return; runes outside the byte range are never whitespace.
func isWhitespace(ch rune) bool {
	return int(ch) < len(wsbytes) && wsbytes[ch]
}
+
+func splitSpaces(s string) []string {
+ var r []string
+ tokStart := -1
+ for i, ch := range s {
+ if isWhitespace(ch) {
+ if tokStart >= 0 {
+ r = append(r, s[tokStart:i])
+ tokStart = -1
+ }
+ } else {
+ if tokStart < 0 {
+ tokStart = i
+ }
+ }
+ }
+ if tokStart >= 0 {
+ r = append(r, s[tokStart:])
+ }
+ glog.V(2).Infof("splitSpace(%q)=%q", s, r)
+ return r
+}
+
+func splitSpacesBytes(s []byte) (r [][]byte) {
+ tokStart := -1
+ for i, ch := range s {
+ if isWhitespace(rune(ch)) {
+ if tokStart >= 0 {
+ r = append(r, s[tokStart:i])
+ tokStart = -1
+ }
+ } else {
+ if tokStart < 0 {
+ tokStart = i
+ }
+ }
+ }
+ if tokStart >= 0 {
+ r = append(r, s[tokStart:])
+ }
+ glog.V(2).Infof("splitSpace(%q)=%q", s, r)
+ return r
+}
+
// TODO(ukai): use bufio.Scanner?
// wordScanner iterates over the whitespace-separated words of a byte
// slice without allocating.
type wordScanner struct {
	in  []byte
	s   int  // word starts
	i   int  // current pos
	esc bool // handle \-escape
}

// newWordScanner returns a scanner over in with escape handling off.
func newWordScanner(in []byte) *wordScanner {
	return &wordScanner{
		in: in,
	}
}
+
+func (ws *wordScanner) next() bool {
+ for ws.s = ws.i; ws.s < len(ws.in); ws.s++ {
+ if !wsbytes[ws.in[ws.s]] {
+ break
+ }
+ }
+ if ws.s == len(ws.in) {
+ return false
+ }
+ return true
+}
+
// Scan advances to the next word, honoring backslash escapes when esc
// is set (an escaped byte never terminates a word). It reports whether
// a word was found; read it with Bytes.
func (ws *wordScanner) Scan() bool {
	if !ws.next() {
		return false
	}
	for ws.i = ws.s; ws.i < len(ws.in); ws.i++ {
		if ws.esc && ws.in[ws.i] == '\\' {
			// Skip the escaped byte.
			ws.i++
			continue
		}
		if wsbytes[ws.in[ws.i]] {
			break
		}
	}
	return true
}
+
// Bytes returns the word found by the most recent Scan.
func (ws *wordScanner) Bytes() []byte {
	return ws.in[ws.s:ws.i]
}

// Remain returns the rest of the input from the next non-space byte
// onward, or nil when only whitespace remains.
func (ws *wordScanner) Remain() []byte {
	if !ws.next() {
		return nil
	}
	return ws.in[ws.s:]
}
+
// matchPattern reports whether str matches the make pattern pat, where
// a single '%' matches any (possibly empty) substring. Without '%',
// the match is exact.
//
// Fix: require str to be long enough to hold both the prefix and the
// suffix; previously the prefix and suffix checks could overlap, so
// e.g. pattern "aa%aa" incorrectly matched "aaa".
func matchPattern(pat, str string) bool {
	i := strings.IndexByte(pat, '%')
	if i < 0 {
		return pat == str
	}
	// len(pat)-1 is the combined length of prefix and suffix.
	return len(str) >= len(pat)-1 &&
		strings.HasPrefix(str, pat[:i]) &&
		strings.HasSuffix(str, pat[i+1:])
}
+
// matchPatternBytes is the []byte variant of matchPattern.
//
// Fix (kept consistent with matchPattern): require str to be long
// enough to hold both the prefix and the suffix so the two checks
// cannot overlap.
func matchPatternBytes(pat, str []byte) bool {
	i := bytes.IndexByte(pat, '%')
	if i < 0 {
		return bytes.Equal(pat, str)
	}
	return len(str) >= len(pat)-1 &&
		bytes.HasPrefix(str, pat[:i]) &&
		bytes.HasSuffix(str, pat[i+1:])
}
+
// substPattern performs a make patsubst-style replacement: if str
// matches pat (one '%' wildcard), the '%' in repl is replaced by the
// matched stem; without a '%' in pat, an exact match yields repl.
// A non-matching str is returned unchanged.
func substPattern(pat, repl, str string) string {
	sep := strings.IndexByte(pat, '%')
	if sep < 0 {
		// No wildcard: exact-match replacement only.
		if str == pat {
			return repl
		}
		return str
	}
	prefix, suffix := pat[:sep], pat[sep+1:]
	if !strings.HasPrefix(str, prefix) {
		return str
	}
	rest := str[len(prefix):]
	if !strings.HasSuffix(rest, suffix) {
		return str
	}
	stem := rest[:len(rest)-len(suffix)]
	rsep := strings.IndexByte(repl, '%')
	if rsep < 0 {
		// Replacement has no wildcard; it stands on its own.
		return repl
	}
	return repl[:rsep] + stem + repl[rsep+1:]
}
+
// substPatternBytes is the []byte analogue of substPattern, but
// instead of concatenating it returns the three pieces of the result
// (replacement prefix, matched stem, replacement suffix) so callers
// can write them out without allocating. On no match, str is returned
// as pre with nil subst/post.
func substPatternBytes(pat, repl, str []byte) (pre, subst, post []byte) {
	i := bytes.IndexByte(pat, '%')
	if i < 0 {
		// No wildcard: exact-match replacement only.
		if bytes.Equal(str, pat) {
			return repl, nil, nil
		}
		return str, nil, nil
	}
	in := str
	trimed := str
	if i > 0 {
		trimed = bytes.TrimPrefix(in, pat[:i])
		if bytes.Equal(trimed, in) {
			// Prefix did not match.
			return str, nil, nil
		}
	}
	in = trimed
	if i < len(pat)-1 {
		trimed = bytes.TrimSuffix(in, pat[i+1:])
		if bytes.Equal(trimed, in) {
			// Suffix did not match.
			return str, nil, nil
		}
	}

	i = bytes.IndexByte(repl, '%')
	if i < 0 {
		// Replacement has no wildcard; it stands on its own.
		return repl, nil, nil
	}

	return repl[:i], trimed, repl[i+1:]
}
+
+func substRef(pat, repl, str string) string {
+ if strings.IndexByte(pat, '%') >= 0 && strings.IndexByte(repl, '%') >= 0 {
+ return substPattern(pat, repl, str)
+ }
+ str = strings.TrimSuffix(str, pat)
+ return str + repl
+}
+
// stripExt returns s without its final file name extension (as
// determined by filepath.Ext).
func stripExt(s string) string {
	ext := filepath.Ext(s)
	return s[:len(s)-len(ext)]
}
+
+func trimLeftSpace(s string) string {
+ for i, ch := range s {
+ if !isWhitespace(ch) {
+ return s[i:]
+ }
+ }
+ return ""
+}
+
+func trimLeftSpaceBytes(s []byte) []byte {
+ for i, ch := range s {
+ if !isWhitespace(rune(ch)) {
+ return s[i:]
+ }
+ }
+ return nil
+}
+
+func trimRightSpaceBytes(s []byte) []byte {
+ for i := len(s) - 1; i >= 0; i-- {
+ ch := s[i]
+ if !isWhitespace(rune(ch)) {
+ return s[:i+1]
+ }
+ }
+ return nil
+}
+
+func trimSpaceBytes(s []byte) []byte {
+ s = trimLeftSpaceBytes(s)
+ return trimRightSpaceBytes(s)
+}
+
// trimLeadingCurdir strips leading sequences of './' from file names,
// so that ./file and file are considered to be the same file.
// From http://www.gnu.org/software/make/manual/make.html#Features
func trimLeadingCurdir(s string) string {
	i := 0
	for strings.HasPrefix(s[i:], "./") {
		i += 2
	}
	return s[i:]
}
+
// contains reports whether s appears in list.
func contains(list []string, s string) bool {
	for i := range list {
		if list[i] == s {
			return true
		}
	}
	return false
}
+
+func firstWord(line []byte) ([]byte, []byte) {
+ s := newWordScanner(line)
+ if s.Scan() {
+ w := s.Bytes()
+ return w, s.Remain()
+ }
+ return line, nil
+}
+
// findCharOption controls how findLiteralChar handles '$': with
// skipVar the whole variable reference is skipped over; with
// noSkipVar '$' gets no special treatment during the scan.
type findCharOption int

const (
	noSkipVar findCharOption = iota
	skipVar
)
+
// findLiteralChar returns the index in s of the first unescaped
// occurrence of stop1 or stop2, or -1 if none is found. A backslash
// escapes the following byte. With op == skipVar, a '$' begins a
// variable reference and the whole reference — including balanced
// parens/braces for $(...) or ${...} — is skipped rather than scanned.
func findLiteralChar(s []byte, stop1, stop2 byte, op findCharOption) int {
	i := 0
	for {
		var ch byte
		for i < len(s) {
			ch = s[i]
			if ch == '\\' {
				// Skip the backslash and the byte it escapes.
				i += 2
				continue
			}
			if ch == stop1 {
				break
			}
			if ch == stop2 {
				break
			}
			if op == skipVar && ch == '$' {
				break
			}
			i++
		}
		if i >= len(s) {
			return -1
		}
		if ch == '$' {
			// Start of a variable reference: advance past it.
			i++
			if i == len(s) {
				return -1
			}
			oparen := s[i]
			cparen := closeParen(oparen)
			i++
			if cparen != 0 {
				// Parenthesized reference: skip to the matching close
				// paren, tracking nesting depth.
				pcount := 1
			SkipParen:
				for i < len(s) {
					ch = s[i]
					switch ch {
					case oparen:
						pcount++
					case cparen:
						pcount--
						if pcount == 0 {
							i++
							break SkipParen
						}
					}
					i++
				}
			}
			continue
		}
		return i
	}
}
+
// removeComment strips a '#'-comment from line, honoring backslash
// quoting: each pair of backslashes before a '#' collapses to a single
// literal backslash, and a remaining odd backslash quotes the '#'
// itself. It returns the processed line and whether a comment was
// removed. The input slice is copied before the first modification,
// so the caller's backing array is never mutated.
func removeComment(line []byte) ([]byte, bool) {
	var buf []byte
	for i := 0; i < len(line); i++ {
		if line[i] != '#' {
			continue
		}
		// Count the run of backslashes immediately preceding the '#'.
		b := 1
		for ; i-b >= 0; b++ {
			if line[i-b] != '\\' {
				break
			}
		}
		b++
		nb := b / 2
		quoted := b%2 == 1
		if buf == nil {
			// Copy-on-write before the first splice.
			buf = make([]byte, len(line))
			copy(buf, line)
			line = buf
		}
		// Collapse the backslash run, keeping nb backslashes.
		line = append(line[:i-b+nb+1], line[i:]...)
		if !quoted {
			// Unquoted '#': the rest of the line is the comment.
			return line[:i-b+nb+1], true
		}
		i = i - nb + 1
	}
	return line, false
}
+
// cmdline removes the single tab that follows each newline at the
// beginning of recipe continuation lines.
func cmdline(line string) string {
	out := make([]byte, 0, len(line))
	for i := 0; i < len(line); i++ {
		out = append(out, line[i])
		if line[i] == '\n' && i+1 < len(line) && line[i+1] == '\t' {
			// Skip the leading tab of the next recipe line.
			i++
		}
	}
	return string(out)
}
+
+// concatline removes backslash newline.
+// TODO: backslash baskslash newline becomes backslash newline.
+func concatline(line []byte) []byte {
+ var buf []byte
+ for i := 0; i < len(line); i++ {
+ if line[i] != '\\' {
+ continue
+ }
+ if i+1 == len(line) {
+ if line[i-1] != '\\' {
+ line = line[:i]
+ }
+ break
+ }
+ if line[i+1] == '\n' {
+ if buf == nil {
+ buf = make([]byte, len(line))
+ copy(buf, line)
+ line = buf
+ }
+ oline := trimRightSpaceBytes(line[:i])
+ oline = append(oline, ' ')
+ nextline := trimLeftSpaceBytes(line[i+2:])
+ line = append(oline, nextline...)
+ i = len(oline) - 1
+ continue
+ }
+ if i+2 < len(line) && line[i+1] == '\r' && line[i+2] == '\n' {
+ if buf == nil {
+ buf = make([]byte, len(line))
+ copy(buf, line)
+ line = buf
+ }
+ oline := trimRightSpaceBytes(line[:i])
+ oline = append(oline, ' ')
+ nextline := trimLeftSpaceBytes(line[i+3:])
+ line = append(oline, nextline...)
+ i = len(oline) - 1
+ continue
+ }
+ }
+ return line
+}
diff --git a/golang/kati/strutil_test.go b/golang/kati/strutil_test.go
new file mode 100644
index 0000000..56bfe87
--- /dev/null
+++ b/golang/kati/strutil_test.go
@@ -0,0 +1,253 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "fmt"
+ "reflect"
+ "testing"
+)
+
+func TestSplitSpaces(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ want []string
+ }{
+ {
+ in: "foo",
+ want: []string{"foo"},
+ },
+ {
+ in: " ",
+ want: nil,
+ },
+ {
+ in: " foo bar ",
+ want: []string{"foo", "bar"},
+ },
+ {
+ in: " foo bar",
+ want: []string{"foo", "bar"},
+ },
+ {
+ in: "foo bar ",
+ want: []string{"foo", "bar"},
+ },
+ } {
+ got := splitSpaces(tc.in)
+ if !reflect.DeepEqual(got, tc.want) {
+ t.Errorf(`splitSpaces(%q)=%q, want %q`, tc.in, got, tc.want)
+ }
+ }
+}
+
+func TestWordScanner(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ want []string
+ }{
+ {
+ in: "foo",
+ want: []string{"foo"},
+ },
+ {
+ in: " ",
+ want: nil,
+ },
+ {
+ in: " foo bar ",
+ want: []string{"foo", "bar"},
+ },
+ {
+ in: " foo bar",
+ want: []string{"foo", "bar"},
+ },
+ {
+ in: "foo bar ",
+ want: []string{"foo", "bar"},
+ },
+ } {
+ ws := newWordScanner([]byte(tc.in))
+ var got []string
+ for ws.Scan() {
+ got = append(got, string(ws.Bytes()))
+ }
+ if !reflect.DeepEqual(got, tc.want) {
+ t.Errorf(`wordScanner(%q)=%q, want %q`, tc.in, got, tc.want)
+ }
+ }
+}
+
// TestSubstPattern exercises %-pattern substitution (as used by
// patsubst) for both the string and the []byte implementations, which
// must agree on every case: a single '%' in pat is a stem wildcard;
// extra '%'s are literal; a pattern without '%' must match exactly.
func TestSubstPattern(t *testing.T) {
	// concatStr reassembles the (pre, subst, post) triple returned by
	// substPatternBytes into one string for comparison.
	concatStr := func(pre, subst, post []byte) string {
		var s []byte
		s = append(s, pre...)
		s = append(s, subst...)
		s = append(s, post...)
		return string(s)
	}

	for _, tc := range []struct {
		pat  string
		repl string
		in   string
		want string
	}{
		{
			pat:  "%.c",
			repl: "%.o",
			in:   "x.c",
			want: "x.o",
		},
		{
			pat:  "c.%",
			repl: "o.%",
			in:   "c.x",
			want: "o.x",
		},
		{
			pat:  "%.c",
			repl: "%.o",
			in:   "x.c.c",
			want: "x.c.o",
		},
		{
			pat:  "%.c",
			repl: "%.o",
			in:   "x.x y.c",
			want: "x.x y.o",
		},
		{
			pat:  "%.%.c",
			repl: "OK",
			in:   "x.%.c",
			want: "OK",
		},
		{
			pat:  "x.c",
			repl: "XX",
			in:   "x.c",
			want: "XX",
		},
		{
			pat:  "x.c",
			repl: "XX",
			in:   "x.c.c",
			want: "x.c.c",
		},
		{
			pat:  "x.c",
			repl: "XX",
			in:   "x.x.c",
			want: "x.x.c",
		},
	} {
		got := substPattern(tc.pat, tc.repl, tc.in)
		if got != tc.want {
			t.Errorf(`substPattern(%q,%q,%q)=%q, want %q`, tc.pat, tc.repl, tc.in, got, tc.want)
		}

		got = concatStr(substPatternBytes([]byte(tc.pat), []byte(tc.repl), []byte(tc.in)))
		if got != tc.want {
			// NOTE(review): debugging leftover — this duplicates the
			// t.Errorf below on stdout. Removing it would leave the
			// "fmt" import unused, so it is only flagged here.
			fmt.Printf("substPatternBytes(%q,%q,%q)=%q, want %q\n", tc.pat, tc.repl, tc.in, got, tc.want)
			t.Errorf(`substPatternBytes(%q,%q,%q)=%q, want %q`, tc.pat, tc.repl, tc.in, got, tc.want)
		}
	}
}
+
+func TestRemoveComment(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ want string
+ removed bool
+ }{
+ {
+ in: "foo",
+ want: "foo",
+ },
+ {
+ in: "foo #bar",
+ want: "foo ",
+ removed: true,
+ },
+ {
+ in: `foo \#bar`,
+ want: "foo #bar",
+ },
+ {
+ in: `foo \#bar # baz`,
+ want: `foo #bar `,
+ removed: true,
+ },
+ {
+ in: `foo \\ \# \: \; \% \= \a \? \+`,
+ want: `foo \\ # \: \; \% \= \a \? \+`,
+ },
+ {
+ in: `foo \\#bar`,
+ want: `foo \`,
+ removed: true,
+ },
+ {
+ in: `foo \\\#bar`,
+ want: `foo \#bar`,
+ },
+ {
+ in: `PASS:=\#PASS`,
+ want: `PASS:=#PASS`,
+ },
+ } {
+ got, removed := removeComment([]byte(tc.in))
+ if string(got) != tc.want {
+ t.Errorf("removeComment(%q)=%q, _; want=%q, _", tc.in, got, tc.want)
+ }
+ if removed != tc.removed {
+ t.Errorf("removeComment(%q)=_, %t; want=_, %t", tc.in, removed, tc.removed)
+ }
+ }
+}
+
+func TestConcatline(t *testing.T) {
+ for _, tc := range []struct {
+ in string
+ want string
+ }{
+ {
+ in: "foo",
+ want: "foo",
+ },
+ {
+ in: "foo \\\n\t bar",
+ want: "foo bar",
+ },
+ {
+ in: "foo \\\n \\\n\t bar",
+ want: "foo bar",
+ },
+ {
+ in: `foo \`,
+ want: `foo `,
+ },
+ {
+ in: `foo \\`,
+ want: `foo \\`,
+ },
+ } {
+ got := string(concatline([]byte(tc.in)))
+ if got != tc.want {
+ t.Errorf("concatline(%q)=%q; want=%q\n", tc.in, got, tc.want)
+ }
+ }
+}
diff --git a/golang/kati/symtab.go b/golang/kati/symtab.go
new file mode 100644
index 0000000..f8fc1f8
--- /dev/null
+++ b/golang/kati/symtab.go
@@ -0,0 +1,42 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import "sync"
+
// symtabT is a mutex-guarded string intern table.
type symtabT struct {
	mu sync.Mutex
	m  map[string]string
}

// symtab is the process-wide intern table backing intern/internBytes.
var symtab = &symtabT{
	m: make(map[string]string),
}

// intern returns a canonical instance of s so that equal strings share
// a single allocation. Safe for concurrent use.
func intern(s string) string {
	symtab.mu.Lock()
	defer symtab.mu.Unlock()
	if v, ok := symtab.m[s]; ok {
		return v
	}
	symtab.m[s] = s
	return s
}

// internBytes interns the string form of s (the []byte is copied by
// the conversion, so the table never aliases caller memory).
func internBytes(s []byte) string {
	return intern(string(s))
}
diff --git a/golang/kati/var.go b/golang/kati/var.go
new file mode 100644
index 0000000..5e7e996
--- /dev/null
+++ b/golang/kati/var.go
@@ -0,0 +1,371 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "strings"
+)
+
// Var is an interface of make variable.
//
// A Var is a Value that also knows how it was defined:
//   - Append / AppendVar return the Var resulting from appending more
//     text (presumably backing "+=" — confirm at call sites),
//   - Flavor is "simple", "recursive" or "undefined",
//   - Origin is e.g. "file", "environment", "command line", "override",
//     "default", "automatic" or "undefined",
//   - IsDefined reports whether the variable exists at all.
type Var interface {
	Value
	Append(*Evaluator, string) (Var, error)
	AppendVar(*Evaluator, Value) (Var, error)
	Flavor() string
	Origin() string
	IsDefined() bool
}
+
// targetSpecificVar wraps another Var for a target-specific variable;
// op records the assignment operator used. All Var behavior delegates
// to the wrapped Var, with Append results re-wrapped to keep op.
type targetSpecificVar struct {
	v  Var
	op string
}

// Append delegates to the wrapped Var and re-wraps the result with op.
func (v *targetSpecificVar) Append(ev *Evaluator, s string) (Var, error) {
	nv, err := v.v.Append(ev, s)
	if err != nil {
		return nil, err
	}
	return &targetSpecificVar{
		v:  nv,
		op: v.op,
	}, nil
}

// AppendVar delegates to the wrapped Var and re-wraps the result with op.
func (v *targetSpecificVar) AppendVar(ev *Evaluator, v2 Value) (Var, error) {
	nv, err := v.v.AppendVar(ev, v2)
	if err != nil {
		return nil, err
	}
	return &targetSpecificVar{
		v:  nv,
		op: v.op,
	}, nil
}

func (v *targetSpecificVar) Flavor() string {
	return v.v.Flavor()
}
func (v *targetSpecificVar) Origin() string {
	return v.v.Origin()
}
func (v *targetSpecificVar) IsDefined() bool {
	return v.v.IsDefined()
}
func (v *targetSpecificVar) String() string {
	// TODO: If we add the info of |op| a test starts
	// failing. Shouldn't we use this only for debugging?
	return v.v.String()
	// return v.v.String() + " (op=" + v.op + ")"
}
func (v *targetSpecificVar) Eval(w evalWriter, ev *Evaluator) error {
	return v.v.Eval(w, ev)
}

// serialize encodes the operator as the node Type with the wrapped Var
// as the single child.
func (v *targetSpecificVar) serialize() serializableVar {
	return serializableVar{
		Type:     v.op,
		Children: []serializableVar{v.v.serialize()},
	}
}

func (v *targetSpecificVar) dump(d *dumpbuf) {
	d.Byte(valueTypeTSV)
	d.Str(v.op)
	v.v.dump(d)
}
+
+type simpleVar struct {
+ // space separated. note that each string may contain spaces, so
+ // it is not word list.
+ value []string
+ origin string
+}
+
+func (v *simpleVar) Flavor() string { return "simple" }
+func (v *simpleVar) Origin() string { return v.origin }
+func (v *simpleVar) IsDefined() bool { return true }
+
+func (v *simpleVar) String() string { return strings.Join(v.value, " ") }
+func (v *simpleVar) Eval(w evalWriter, ev *Evaluator) error {
+ space := false
+ for _, v := range v.value {
+ if space {
+ writeByte(w, ' ')
+ }
+ io.WriteString(w, v)
+ space = true
+ }
+ return nil
+}
+func (v *simpleVar) serialize() serializableVar {
+ return serializableVar{
+ Type: "simple",
+ V: v.String(),
+ Origin: v.origin,
+ }
+}
+func (v *simpleVar) dump(d *dumpbuf) {
+ d.Byte(valueTypeSimple)
+ d.Int(len(v.value))
+ for _, v := range v.value {
+ d.Str(v)
+ }
+ d.Str(v.origin)
+}
+
+func (v *simpleVar) Append(ev *Evaluator, s string) (Var, error) {
+ val, _, err := parseExpr([]byte(s), nil, parseOp{})
+ if err != nil {
+ return nil, err
+ }
+ abuf := newEbuf()
+ err = val.Eval(abuf, ev)
+ if err != nil {
+ return nil, err
+ }
+ v.value = append(v.value, abuf.String())
+ abuf.release()
+ return v, nil
+}
+
+func (v *simpleVar) AppendVar(ev *Evaluator, val Value) (Var, error) {
+ abuf := newEbuf()
+ err := val.Eval(abuf, ev)
+ if err != nil {
+ return nil, err
+ }
+ v.value = append(v.value, abuf.String())
+ abuf.release()
+ return v, nil
+}
+
+type automaticVar struct {
+ value []byte
+}
+
+func (v *automaticVar) Flavor() string { return "simple" }
+func (v *automaticVar) Origin() string { return "automatic" }
+func (v *automaticVar) IsDefined() bool { return true }
+
+func (v *automaticVar) String() string { return string(v.value) }
+func (v *automaticVar) Eval(w evalWriter, ev *Evaluator) error {
+ w.Write(v.value)
+ return nil
+}
+func (v *automaticVar) serialize() serializableVar {
+ return serializableVar{Type: ""}
+}
+func (v *automaticVar) dump(d *dumpbuf) {
+ d.err = fmt.Errorf("cannnot dump automatic var:%s", v.value)
+}
+
+func (v *automaticVar) Append(ev *Evaluator, s string) (Var, error) {
+ val, _, err := parseExpr([]byte(s), nil, parseOp{})
+ if err != nil {
+ return nil, err
+ }
+ abuf := newEbuf()
+ err = val.Eval(abuf, ev)
+ if err != nil {
+ return nil, err
+ }
+ value := []string{string(v.value), abuf.String()}
+ abuf.release()
+ return &simpleVar{
+ value: value,
+ origin: "file",
+ }, nil
+}
+
+func (v *automaticVar) AppendVar(ev *Evaluator, val Value) (Var, error) {
+ abuf := newEbuf()
+ err := val.Eval(abuf, ev)
+ if err != nil {
+ return nil, err
+ }
+ value := []string{string(v.value), abuf.String()}
+ abuf.release()
+ return &simpleVar{
+ value: value,
+ origin: "file",
+ }, nil
+}
+
+type recursiveVar struct {
+ expr Value
+ origin string
+}
+
+func (v *recursiveVar) Flavor() string { return "recursive" }
+func (v *recursiveVar) Origin() string { return v.origin }
+func (v *recursiveVar) IsDefined() bool { return true }
+
+func (v *recursiveVar) String() string { return v.expr.String() }
+func (v *recursiveVar) Eval(w evalWriter, ev *Evaluator) error {
+ v.expr.Eval(w, ev)
+ return nil
+}
+func (v *recursiveVar) serialize() serializableVar {
+ return serializableVar{
+ Type: "recursive",
+ Children: []serializableVar{v.expr.serialize()},
+ Origin: v.origin,
+ }
+}
+func (v *recursiveVar) dump(d *dumpbuf) {
+ d.Byte(valueTypeRecursive)
+ v.expr.dump(d)
+ d.Str(v.origin)
+}
+
+func (v *recursiveVar) Append(_ *Evaluator, s string) (Var, error) {
+ var exp expr
+ if e, ok := v.expr.(expr); ok {
+ exp = append(e, literal(" "))
+ } else {
+ exp = expr{v.expr, literal(" ")}
+ }
+ sv, _, err := parseExpr([]byte(s), nil, parseOp{alloc: true})
+ if err != nil {
+ return nil, err
+ }
+ if aexpr, ok := sv.(expr); ok {
+ exp = append(exp, aexpr...)
+ } else {
+ exp = append(exp, sv)
+ }
+ v.expr = exp
+ return v, nil
+}
+
+func (v *recursiveVar) AppendVar(ev *Evaluator, val Value) (Var, error) {
+ var buf bytes.Buffer
+ buf.WriteString(v.expr.String())
+ buf.WriteByte(' ')
+ buf.WriteString(val.String())
+ e, _, err := parseExpr(buf.Bytes(), nil, parseOp{alloc: true})
+ if err != nil {
+ return nil, err
+ }
+ v.expr = e
+ return v, nil
+}
+
+type undefinedVar struct{}
+
+func (undefinedVar) Flavor() string { return "undefined" }
+func (undefinedVar) Origin() string { return "undefined" }
+func (undefinedVar) IsDefined() bool { return false }
+func (undefinedVar) String() string { return "" }
+func (undefinedVar) Eval(_ evalWriter, _ *Evaluator) error {
+ return nil
+}
+func (undefinedVar) serialize() serializableVar {
+ return serializableVar{Type: "undefined"}
+}
+func (undefinedVar) dump(d *dumpbuf) {
+ d.Byte(valueTypeUndefined)
+}
+
+func (undefinedVar) Append(*Evaluator, string) (Var, error) {
+ return undefinedVar{}, nil
+}
+
+func (undefinedVar) AppendVar(_ *Evaluator, val Value) (Var, error) {
+ return undefinedVar{}, nil
+}
+
// Vars is a map for make variables.
type Vars map[string]Var

// usedEnvs tracks what environment variables are used.
// NOTE(review): written without synchronization; this assumes lookups
// happen from a single goroutine — confirm against callers.
var usedEnvs = map[string]bool{}

// Lookup looks up named make variable. It returns undefinedVar{} (not
// nil) when the name is unbound, and records any variable whose origin
// starts with "environment" (covers "environment override" too) in
// usedEnvs.
func (vt Vars) Lookup(name string) Var {
	if v, ok := vt[name]; ok {
		if strings.HasPrefix(v.Origin(), "environment") {
			usedEnvs[name] = true
		}
		return v
	}
	return undefinedVar{}
}
+
+// origin precedence
+// override / environment override
+// command line
+// file
+// environment
+// default
+// TODO(ukai): is this correct order?
+var originPrecedence = map[string]int{
+ "override": 4,
+ "environment override": 4,
+ "command line": 3,
+ "file": 2,
+ "environment": 2,
+ "default": 1,
+ "undefined": 0,
+ "automatic": 0,
+}
+
+// Assign assigns v to name.
+func (vt Vars) Assign(name string, v Var) {
+ vo := v.Origin()
+ // assign automatic always win.
+ // assign new value to automatic always win.
+ if vo != "automatic" {
+ vp := originPrecedence[v.Origin()]
+ var op int
+ if ov, ok := vt[name]; ok {
+ op = originPrecedence[ov.Origin()]
+ }
+ if op > vp {
+ return
+ }
+ }
+ vt[name] = v
+}
+
+// NewVars creates new Vars.
+func NewVars(vt Vars) Vars {
+ r := make(Vars)
+ r.Merge(vt)
+ return r
+}
+
+// Merge merges vt2 into vt.
+func (vt Vars) Merge(vt2 Vars) {
+ for k, v := range vt2 {
+ vt[k] = v
+ }
+}
+
+// save saves value of the variable named name.
+// calling returned value will restore to the old value at the time
+// when save called.
+func (vt Vars) save(name string) func() {
+ if v, ok := vt[name]; ok {
+ return func() {
+ vt[name] = v
+ }
+ }
+ return func() {
+ delete(vt, name)
+ }
+}
diff --git a/golang/kati/version.go b/golang/kati/version.go
new file mode 100644
index 0000000..4ec6e6a
--- /dev/null
+++ b/golang/kati/version.go
@@ -0,0 +1,17 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
// gitVersion holds the kati git revision; empty by default and
// presumably injected at build time (e.g. -ldflags "-X ...") —
// confirm in the build scripts.
var gitVersion string
diff --git a/golang/kati/worker.go b/golang/kati/worker.go
new file mode 100644
index 0000000..a339d23
--- /dev/null
+++ b/golang/kati/worker.go
@@ -0,0 +1,368 @@
+// Copyright 2015 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package kati
+
+import (
+ "container/heap"
+ "errors"
+ "fmt"
+ "os"
+ "os/exec"
+ "syscall"
+ "time"
+
+ "github.com/golang/glog"
+)
+
var (
	// errNothingDone is the sentinel job.build returns when the target
	// was already up to date and no command needed to run; the manager
	// counts it as a skip, not a failure.
	errNothingDone = errors.New("nothing done")
)
+
// job is one dependency-graph node scheduled for building.
type job struct {
	n        *DepNode
	ex       *Executor
	parents  []*job // jobs that depend on this one
	outputTs int64  // output timestamp; negative when the file is missing
	numDeps  int    // unfinished dependencies; set to -1 once dispatched
	depsTs   int64  // newest outputTs among finished dependencies
	id       int    // submission order, used for FIFO scheduling

	runners []runner
}

// jobResult is what a worker reports back after building a job.
type jobResult struct {
	j   *job
	w   *worker
	err error
}

// newDep tells the manager that job j is needed by neededBy.
type newDep struct {
	j        *job
	neededBy *job
}

// worker builds jobs one at a time on its own goroutine.
type worker struct {
	wm       *workerManager
	jobChan  chan *job // jobs to build
	waitChan chan bool // asks Run to finish
	doneChan chan bool // Run acknowledges shutdown
}
+
// jobQueue implements heap.Interface over []*job so the ready queue
// pops jobs in submission (id) order.
type jobQueue []*job

func (jq jobQueue) Len() int      { return len(jq) }
func (jq jobQueue) Swap(i, j int) { jq[i], jq[j] = jq[j], jq[i] }

func (jq jobQueue) Less(i, j int) bool {
	// First come, first serve, for GNU make compatibility.
	return jq[i].id < jq[j].id
}

// Push appends x; invoked by heap.Push, not called directly.
func (jq *jobQueue) Push(x interface{}) {
	item := x.(*job)
	*jq = append(*jq, item)
}

// Pop removes and returns the last element; invoked by heap.Pop,
// which has already swapped the minimum into that slot.
func (jq *jobQueue) Pop() interface{} {
	old := *jq
	n := len(old)
	item := old[n-1]
	*jq = old[0 : n-1]
	return item
}
+
+func newWorker(wm *workerManager) *worker {
+ w := &worker{
+ wm: wm,
+ jobChan: make(chan *job),
+ waitChan: make(chan bool),
+ doneChan: make(chan bool),
+ }
+ return w
+}
+
// Run is the worker goroutine: it builds each job received on jobChan
// and reports the result, until a value arrives on waitChan; it then
// acknowledges shutdown on doneChan.
func (w *worker) Run() {
	done := false
	for !done {
		select {
		case j := <-w.jobChan:
			err := j.build()
			w.wm.ReportResult(w, j, err)
		case done = <-w.waitChan:
		}
	}
	w.doneChan <- true
}

// PostJob hands a job to this worker (blocks until Run receives it).
func (w *worker) PostJob(j *job) {
	w.jobChan <- j
}

// Wait asks Run to stop and blocks until it has exited.
func (w *worker) Wait() {
	w.waitChan <- true
	<-w.doneChan
}
+
// createRunners builds the command runners for this job's node,
// delegating to the package-level createRunners and dropping its
// middle result.
func (j *job) createRunners() ([]runner, error) {
	runners, _, err := createRunners(j.ex.ctx, j.n)
	return runners, err
}
+
// TODO(ukai): use time.Time?

// getTimestamp returns filename's mtime in unix seconds, or -2 when
// the file cannot be stat'd (typically: it does not exist).
func getTimestamp(filename string) int64 {
	if st, err := os.Stat(filename); err == nil {
		return st.ModTime().Unix()
	}
	return -2
}
+
// build brings j's output up to date: it stats the output, decides
// whether the rule's commands must run, runs them, and refreshes
// j.outputTs. It returns errNothingDone when nothing had to be done,
// or an error formatted like GNU make's diagnostics on failure.
func (j *job) build() error {
	if j.n.IsPhony {
		j.outputTs = -2 // trigger cmd even if all inputs don't exist.
	} else {
		j.outputTs = getTimestamp(j.n.Output)
	}

	if !j.n.HasRule {
		// No rule: fine if the file already exists (or target is
		// phony); otherwise report "No rule to make target".
		if j.outputTs >= 0 || j.n.IsPhony {
			return errNothingDone
		}
		if len(j.parents) == 0 {
			return fmt.Errorf("*** No rule to make target %q.", j.n.Output)
		}
		return fmt.Errorf("*** No rule to make target %q, needed by %q.", j.n.Output, j.parents[0].n.Output)
	}

	if j.outputTs >= j.depsTs {
		// Output is at least as new as every finished dependency.
		// TODO: stats.
		return errNothingDone
	}

	rr, err := j.createRunners()
	if err != nil {
		return err
	}
	if len(rr) == 0 {
		return errNothingDone
	}
	// Run the commands in order; stop at the first failure.
	for _, r := range rr {
		err := r.run(j.n.Output)
		glog.Warningf("cmd result for %q: %v", j.n.Output, err)
		if err != nil {
			exit := exitStatus(err)
			return fmt.Errorf("*** [%s] Error %d", j.n.Output, exit)
		}
	}

	// Refresh the output timestamp; fall back to "now" for phony
	// targets or commands that produced no file.
	if j.n.IsPhony {
		j.outputTs = time.Now().Unix()
	} else {
		j.outputTs = getTimestamp(j.n.Output)
		if j.outputTs < 0 {
			j.outputTs = time.Now().Unix()
		}
	}
	return nil
}
+
+func (wm *workerManager) handleJobs() error {
+ for {
+ if len(wm.freeWorkers) == 0 {
+ return nil
+ }
+ if wm.readyQueue.Len() == 0 {
+ return nil
+ }
+ j := heap.Pop(&wm.readyQueue).(*job)
+ glog.V(1).Infof("run: %s", j.n.Output)
+
+ j.numDeps = -1 // Do not let other workers pick this.
+ w := wm.freeWorkers[0]
+ wm.freeWorkers = wm.freeWorkers[1:]
+ wm.busyWorkers[w] = true
+ w.jobChan <- j
+ }
+}
+
+func (wm *workerManager) updateParents(j *job) {
+ for _, p := range j.parents {
+ p.numDeps--
+ glog.V(1).Infof("child: %s (%d)", p.n.Output, p.numDeps)
+ if p.depsTs < j.outputTs {
+ p.depsTs = j.outputTs
+ }
+ wm.maybePushToReadyQueue(p)
+ }
+}
+
// workerManager owns the worker pool and all scheduling state.
// The non-channel fields appear to be touched only from the Run
// goroutine (other goroutines communicate via the channels) — confirm
// before relying on this for new mutations.
type workerManager struct {
	maxJobs     int
	jobs        []*job         // every job ever posted
	readyQueue  jobQueue       // jobs with no pending deps, FIFO by id
	jobChan     chan *job      // PostJob -> Run
	resultChan  chan jobResult // ReportResult -> Run
	newDepChan  chan newDep    // ReportNewDep -> Run
	stopChan    chan bool      // closed when the first job fails
	waitChan    chan bool      // Wait -> Run
	doneChan    chan error     // Run -> Wait: the final build error
	freeWorkers []*worker
	busyWorkers map[*worker]bool
	ex          *Executor
	runnings    map[string]*job

	finishCnt int // finished jobs, including skipped ones
	skipCnt   int // jobs that returned errNothingDone
}
+
+func newWorkerManager(numJobs int) (*workerManager, error) {
+ wm := &workerManager{
+ maxJobs: numJobs,
+ jobChan: make(chan *job),
+ resultChan: make(chan jobResult),
+ newDepChan: make(chan newDep),
+ stopChan: make(chan bool),
+ waitChan: make(chan bool),
+ doneChan: make(chan error),
+ busyWorkers: make(map[*worker]bool),
+ }
+
+ wm.busyWorkers = make(map[*worker]bool)
+ for i := 0; i < numJobs; i++ {
+ w := newWorker(wm)
+ wm.freeWorkers = append(wm.freeWorkers, w)
+ go w.Run()
+ }
+ heap.Init(&wm.readyQueue)
+ go wm.Run()
+ return wm, nil
+}
+
// exitStatus maps err to a process exit code: 0 for nil, the child's
// real status for *exec.ExitError on platforms exposing
// syscall.WaitStatus, and 1 for anything else.
func exitStatus(err error) int {
	if err == nil {
		return 0
	}
	ee, ok := err.(*exec.ExitError)
	if !ok {
		return 1
	}
	if ws, ok := ee.ProcessState.Sys().(syscall.WaitStatus); ok {
		return ws.ExitStatus()
	}
	return 1
}
+
+func (wm *workerManager) hasTodo() bool {
+ return wm.finishCnt != len(wm.jobs)
+}
+
+func (wm *workerManager) maybePushToReadyQueue(j *job) {
+ if j.numDeps != 0 {
+ return
+ }
+ heap.Push(&wm.readyQueue, j)
+ glog.V(1).Infof("ready: %s", j.n.Output)
+}
+
// handleNewDep records that neededBy depends on j. If j was already
// dispatched (numDeps set to -1 in handleJobs), the dependency counts
// as satisfied immediately; otherwise j remembers neededBy so
// updateParents can notify it when j finishes.
func (wm *workerManager) handleNewDep(j *job, neededBy *job) {
	if j.numDeps < 0 {
		neededBy.numDeps--
		if neededBy.id > 0 {
			// id > 0 means neededBy was already posted to the
			// manager; the original author doubted this state is
			// reachable.
			panic("FIXME: already in WM... can this happen?")
		}
	} else {
		j.parents = append(j.parents, neededBy)
	}
}
+
// Run is the manager's event loop. It serializes all scheduling state
// by handling one channel event at a time — new jobs, worker results,
// late-discovered dependencies, and the final Wait request — and
// dispatches ready jobs to free workers after every event. On the
// first failing job it closes stopChan and exits the loop early; it
// then joins every worker and reports the final error on doneChan.
func (wm *workerManager) Run() {
	done := false
	var err error
Loop:
	for wm.hasTodo() || len(wm.busyWorkers) > 0 || len(wm.runnings) > 0 || !done {
		select {
		case j := <-wm.jobChan:
			glog.V(1).Infof("wait: %s (%d)", j.n.Output, j.numDeps)
			// ids start at 1 and record submission order (FIFO heap).
			j.id = len(wm.jobs) + 1
			wm.jobs = append(wm.jobs, j)
			wm.maybePushToReadyQueue(j)
		case jr := <-wm.resultChan:
			glog.V(1).Infof("done: %s", jr.j.n.Output)
			delete(wm.busyWorkers, jr.w)
			wm.freeWorkers = append(wm.freeWorkers, jr.w)
			wm.updateParents(jr.j)
			wm.finishCnt++
			if jr.err == errNothingDone {
				// Up-to-date target: a skip, not a failure.
				wm.skipCnt++
				jr.err = nil
			}
			if jr.err != nil {
				err = jr.err
				close(wm.stopChan)
				break Loop
			}
		case af := <-wm.newDepChan:
			wm.handleNewDep(af.j, af.neededBy)
			glog.V(1).Infof("dep: %s (%d) %s", af.neededBy.n.Output, af.neededBy.numDeps, af.j.n.Output)
		case done = <-wm.waitChan:
		}
		err = wm.handleJobs()
		if err != nil {
			break Loop
		}

		glog.V(1).Infof("job=%d ready=%d free=%d busy=%d", len(wm.jobs)-wm.finishCnt, wm.readyQueue.Len(), len(wm.freeWorkers), len(wm.busyWorkers))
	}
	if !done {
		// Left the loop before Wait was called (error path): still
		// consume the Wait request so the caller is not left blocked.
		<-wm.waitChan
	}

	// Stop and join every worker before signalling completion.
	for _, w := range wm.freeWorkers {
		w.Wait()
	}
	for w := range wm.busyWorkers {
		w.Wait()
	}
	wm.doneChan <- err
}
+
// PostJob submits a job to the manager; it fails once the manager has
// stopped after a build error (stopChan closed).
func (wm *workerManager) PostJob(j *job) error {
	select {
	case wm.jobChan <- j:
		return nil
	case <-wm.stopChan:
		return errors.New("worker manager stopped")
	}
}

// ReportResult delivers a worker's build result to the manager; it is
// silently dropped if the manager has already stopped.
func (wm *workerManager) ReportResult(w *worker, j *job, err error) {
	select {
	case wm.resultChan <- jobResult{w: w, j: j, err: err}:
	case <-wm.stopChan:
	}
}

// ReportNewDep tells the manager about a dependency discovered after j
// was posted; it is silently dropped if the manager has stopped.
func (wm *workerManager) ReportNewDep(j *job, neededBy *job) {
	select {
	case wm.newDepChan <- newDep{j: j, neededBy: neededBy}:
	case <-wm.stopChan:
	}
}
+
// Wait blocks until every job has finished and the manager has shut
// down. It returns the number of jobs that actually ran commands
// (finished minus skipped) and the first build error, if any.
func (wm *workerManager) Wait() (int, error) {
	wm.waitChan <- true
	err := <-wm.doneChan
	glog.V(2).Infof("finish %d skip %d", wm.finishCnt, wm.skipCnt)
	return wm.finishCnt - wm.skipCnt, err
}