func translateToSAT(vc VertexCover) (clauses sat.ClauseSet) { p := sat.Pred("vc") //at least constraint for each edge s := "at least one" for _, e := range vc.Edges { l1 := sat.Literal{true, sat.Atom{p, e.a, 0}} l2 := sat.Literal{true, sat.Atom{p, e.b, 0}} clauses.AddClause(s, l1, l2) } //global counter sorter := sorters.CreateCardinalityNetwork(vc.NVertex, vc.Size, sorters.AtMost, sorters.Pairwise) sorter.RemoveOutput() litIn := make([]sat.Literal, vc.NVertex) for i, _ := range litIn { litIn[i] = sat.Literal{true, sat.Atom{p, i + 1, 0}} } which := [8]bool{false, false, false, true, true, true, false, false} pred := sat.Pred("aux") clauses.AddClauseSet(sat.CreateEncoding(litIn, which, []sat.Literal{}, "atMost", pred, sorter)) return }
func (pb *Threshold) TranslateBySN() { pb.TransTyp = CSN pb.Normalize(LE, true) glob.A(pb.Typ == LE, "does not work on OPT or ==, but we have", pb.Typ) pb.SortDescending() sn := NewSortingNetwork(*pb) sn.CreateSorter() //glob.D("size of comparators", len(sn.Sorter.Comparators)) //PrintThresholdTikZ("sn.tex", []SortingNetwork{sn}) wh := 1 var which [8]bool switch wh { case 1: which = [8]bool{false, false, false, true, true, true, false, false} case 2: which = [8]bool{false, false, false, true, true, true, false, true} case 3: which = [8]bool{false, true, true, true, true, true, true, false} case 4: which = [8]bool{false, true, true, true, true, true, true, true} } pred := sat.Pred("auxSN_" + strconv.Itoa(pb.Id)) pb.Clauses.AddClauseSet(CreateEncoding(sn.LitIn, which, []sat.Literal{}, "BnB", pred, sn.Sorter)) }
func createLiterals(start int, n int) (literals []sat.Literal) { p := sat.Pred("x") literals = make([]sat.Literal, n) for i := 0; i < n; i++ { literals[i] = sat.Literal{true, sat.NewAtomP1(p, start+i)} } return }
func createEntries(weights []int64) (entries []Entry) { p := sat.Pred("x") entries = make([]Entry, len(weights)) for i := 0; i < len(weights); i++ { l := sat.Literal{true, sat.NewAtomP1(p, i)} entries[i] = Entry{l, weights[i]} } return }
func translateInstance(g QGC) (clauses sat.ClauseSet) { p := sat.Pred("v") for i, r := range g { for j, k := range r { if k >= 0 { l1 := sat.Literal{true, sat.NewAtomP3(p, i, j, k)} clauses.AddTaggedClause("Instance", l1) } } } return }
// creates an AtMost constraint // with coefficients in weights, // variables x1..xm func CreatePBOffset(offset int, weights []int64, K int64) (pb Threshold) { pb.Entries = make([]Entry, len(weights)) pb.Typ = LE pb.K = K // p := sat.Pred("x") for i := 0; i < len(weights); i++ { //l := sat.Literal{true, sat.NewAtomP(sat.Pred("x_{" + strconv.Itoa(i+offset) + "}"))} l := sat.Literal{true, sat.NewAtomP(sat.Pred("x" + strconv.Itoa(i+offset) + "}"))} pb.Entries[i] = Entry{l, weights[i]} } return }
func translateCardDecomposition(n int, typ constraints.OneTranslationType) (clauses sat.ClauseSet) { p := sat.Pred("v") for i := 0; i < n; i++ { for j := 0; j < n; j++ { lits := make([]sat.Literal, n) for k := 0; k < n; k++ { lits[k] = sat.Literal{true, sat.NewAtomP3(p, i, j, k)} } //clauses.AddTaggedClause("AtLeast", lits...) clauses.AddClauseSet(constraints.TranslateExactlyOne(typ, "ex1Value", lits).Clauses) } } for k := 0; k < n; k++ { for i := 0; i < n; i++ { lits := make([]sat.Literal, n) for j := 0; j < n; j++ { lits[j] = sat.Literal{true, sat.NewAtomP3(p, i, j, k)} } // in each row each value at most one clauses.AddClauseSet(constraints.TranslateExactlyOne(typ, "ex1Row", lits).Clauses) } } for k := 0; k < n; k++ { for j := 0; j < n; j++ { lits := make([]sat.Literal, n) for i := 0; i < n; i++ { lits[i] = sat.Literal{true, sat.NewAtomP3(p, i, j, k)} } // in each column each value at most one clauses.AddClauseSet(constraints.TranslateExactlyOne(typ, "ex1Column", lits).Clauses) } } return }
// Translate monotone MDDs to SAT
// Together with AMO translation
//
// convertMDD2Clauses walks every node of the interval-MDD store and
// emits clauses over fresh atoms mdd<pb.Id>(nodeId) that force the
// MDD's root ("Top") to be true, propagate truth along edges guarded
// by the PB's literals, and fix the 0/1 terminals.
func convertMDD2Clauses(store mdd.IntervalMddStore, pb *Threshold) (clauses sat.ClauseSet) {
	pred := sat.Pred("mdd" + strconv.Itoa(pb.Id))
	// The root node of the MDD must hold.
	top_lit := sat.Literal{true, sat.NewAtomP1(pred, store.Top)}
	clauses.AddTaggedClause("Top", top_lit)
	for _, n := range store.Nodes {
		v_id, l, vds := store.ClauseIds(*n)
		if !n.IsZero() && !n.IsOne() {
			// Inner node v: for each distinct child, v /\ guard -> child.
			v_lit := sat.Literal{false, sat.NewAtomP1(pred, v_id)}
			last_id := -1
			for i, vd_id := range vds {
				// Consecutive equal child ids come from interval edges;
				// emit only one clause per distinct child.
				if last_id != vd_id {
					vd_lit := sat.Literal{true, sat.NewAtomP1(pred, vd_id)}
					if i > 0 {
						// NOTE(review): the entry index is derived from the MDD
						// level l counted from the back of pb.Entries — confirm
						// against IntervalMddStore.ClauseIds before changing.
						literal := pb.Entries[len(pb.Entries)-l+i-1].Literal
						clauses.AddTaggedClause("1B", v_lit, sat.Neg(literal), vd_lit)
					} else {
						// First edge is unconditional: v -> child_0.
						clauses.AddTaggedClause("0B", v_lit, vd_lit)
					}
				}
				last_id = vd_id
			}
		} else if n.IsZero() {
			// The 0-terminal must never be reached.
			v_lit := sat.Literal{false, sat.NewAtomP1(pred, v_id)}
			clauses.AddTaggedClause("False", v_lit)
		} else if n.IsOne() {
			// The 1-terminal always holds.
			v_lit := sat.Literal{true, sat.NewAtomP1(pred, v_id)}
			clauses.AddTaggedClause("True", v_lit)
		}
	}
	return
}
// CreateCardinality takes set of literals and creates a sorting network func (pb *Threshold) CreateCardinality() { for _, x := range pb.Entries { glob.A(x.Weight == 1, "Prerequisite for this translation") } literals := pb.Literals() sx := strconv.Itoa(int(pb.K)) + "\\" + strconv.Itoa(len(literals)) var s string var sorterEqTyp sorters.EquationType var w int // which type of clauses switch pb.Typ { case LE: w = 0 sorterEqTyp = sorters.AtMost s = pb.IdS() + "pb<SN" + sx case GE: w = 3 sorterEqTyp = sorters.AtLeast s = pb.IdS() + "pb>SN" + sx case EQ: w = 3 s = pb.IdS() + "pb=SN" + sx sorterEqTyp = sorters.Equal default: panic("Not supported") } sorter := sorters.CreateCardinalityNetwork(len(literals), int(pb.K), sorterEqTyp, sorters.Pairwise) sorter.RemoveOutput() pred := sat.Pred("SN-" + pb.IdS()) output := make([]sat.Literal, 0) pb.Clauses.AddClauseSet(CreateEncoding(literals, sorters.WhichCls(w), output, s, pred, sorter)) }
// returns the encoding of this PB
// adds to internal clauses
//
// RewriteSameWeights scans the entries after any existing chains and,
// for every run of at least *glob.Len_rewrite_same_flag equal weights,
// replaces the run's literals by the sorted outputs of a cardinality
// network. The sorted outputs are recorded as a Chain so later
// translations can exploit their monotonicity; shorter runs are kept
// unchanged. Assumes entries are grouped by weight (sorted).
func (pb *Threshold) RewriteSameWeights() {
	// put following two lines in the code itself
	// go to the end of chains
	// reorder PB after this descending
	posAfterChains := pb.PosAfterChains()
	entries := pb.Entries[posAfterChains:]
	//glob.D(pb)
	es := make([]Entry, 0, len(entries))      // current run of equal-weight entries
	rest := make([]Entry, 0, len(entries))    // entries kept without rewriting
	newEntries := make([]Entry, len(entries)) // rewritten entry list being built
	last := int64(-1)                         // weight of the current run
	rewrite := 0                              //number of rewrite chains
	pos := 0                                  // current position in newEntries
	pred := sat.Pred("re-aux")
	for i, x := range entries {
		if last == x.Weight {
			es = append(es, entries[i])
		} else {
			// Run ended: rewrite it if long enough, otherwise keep verbatim.
			if len(es) >= *glob.Len_rewrite_same_flag {
				rewrite++
				// most = how many literals of the run can be true at once,
				// bounded by K (resp. the optimization bound) / weight.
				var most int
				if pb.Typ == OPT {
					// NOTE(review): this branch tests Opt_bound_flag >= 0 and
					// ignores pb.Offset, while the final flush below tests
					// != math.MaxInt64 and adds pb.Offset — confirm which
					// variant is intended; they look inconsistent.
					if *glob.Opt_bound_flag >= 0 {
						most = int(min(int64(len(es)), int64(math.Floor(float64(*glob.Opt_bound_flag)/float64(last)))))
					} else {
						most = len(es)
					}
				} else {
					most = int(min(int64(len(es)), int64(math.Floor(float64(pb.K)/float64(last)))))
				}
				output := make(Lits, most)
				input := make(Lits, len(es))
				for j, _ := range input {
					if j < most {
						// Fresh sorted-output literal replaces es[j] in the PB.
						output[j] = sat.Literal{true, sat.NewAtomP3(pred, pb.Id, rewrite, j)}
						newEntries[pos] = Entry{output[j], es[j].Weight}
						pos++
					}
					input[j] = es[j].Literal
				}
				sorter := sorters.CreateCardinalityNetwork(len(input), most, sorters.AtMost, sorters.Pairwise)
				sn_aux := sat.Pred("SN-" + pb.IdS() + "-" + strconv.Itoa(rewrite))
				cls := CreateEncoding(input, sorters.WhichCls(2), output, pb.IdS()+"re-SN", sn_aux, sorter)
				//glob.D(pb.Id, "SN", len(input), most, cls.Size())
				pb.Clauses.AddClauseSet(cls)
				pb.Chains = append(pb.Chains, Chain(output))
			} else {
				rest = append(rest, es...)
				//glob.D("dont rewrite this", rest)
			}
			es = []Entry{entries[i]} // start a new run with the current entry
		}
		last = x.Weight
	}
	// Flush the final run (mirrors the logic inside the loop).
	if len(es) >= *glob.Len_rewrite_same_flag {
		rewrite++
		var most int
		if pb.Typ == OPT {
			if *glob.Opt_bound_flag != math.MaxInt64 {
				// glob.D(pb.Id, "Check", *glob.Opt_bound_flag+pb.Offset)
				most = int(min(int64(len(es)), int64(math.Floor(float64(*glob.Opt_bound_flag+pb.Offset)/float64(last)))))
			} else {
				most = len(es)
			}
		} else {
			most = int(min(int64(len(es)), int64(math.Floor(float64(pb.K)/float64(last)))))
		}
		//glob.D("most", most, "len(es)", len(es), "K", pb.K, "last", last)
		sn_aux := sat.Pred("SN-" + pb.IdS() + "-" + strconv.Itoa(rewrite))
		output := make(Lits, most)
		input := make(Lits, len(es))
		for j, _ := range input {
			if j < most {
				output[j] = sat.Literal{true, sat.NewAtomP3(pred, pb.Id, rewrite, j)}
				newEntries[pos] = Entry{output[j], es[j].Weight}
				pos++
			}
			input[j] = es[j].Literal
		}
		sorter := sorters.CreateCardinalityNetwork(len(input), most, sorters.AtMost, sorters.Pairwise)
		cls := CreateEncoding(input, sorters.WhichCls(2), output, pb.IdS()+"re-SN", sn_aux, sorter)
		//glob.D(pb.Id, "SN", len(input), most, cls.Size())
		pb.Clauses.AddClauseSet(cls)
		pb.Chains = append(pb.Chains, Chain(output))
	} else {
		rest = append(rest, es...)
	}
	// Append the untouched entries and shrink pb.Entries to the new size.
	for _, x := range rest {
		newEntries[pos] = x
		pos++
	}
	//glob.A(pos == len(newEntries), "Not enough entries copied!!")
	pb.Entries = pb.Entries[:posAfterChains+pos]
	copy(pb.Entries[posAfterChains:], newEntries)
	//glob.D(pb)
	//glob.D(pb.Chains)
}
// printSAT builds and prints (DIMACS or debug form) a CNF for the
// task-assignment problem: every task goes to exactly one capable
// worker, at most *nWorkers workers are used in total, and no worker
// is assigned two time-overlapping tasks.
func printSAT(tasks []Task, workers []Worker) {
	pAssign := sat.Pred("assign") // assign(w,t): worker w does task t
	pWorks := sat.Pred("works")   // works(w,0): worker w is used at all
	sat.SetUp(4, sorters.Pairwise)
	var clauses sat.ClauseSet
	// at least one: simple clause
	for _, t := range tasks {
		lits := make([]sat.Literal, len(t.worker))
		i := 0
		for wId, _ := range t.worker {
			lits[i] = sat.Literal{true, sat.Atom{pAssign, wId, t.id}}
			i++
		}
		clauses.AddClause("al1", lits...)
		// ... and at most one worker per task.
		clauses.AddClauseSet(sat.CreateCardinality("am1", lits, 1, sorters.AtMost))
	}
	// count number of employees
	for _, w := range workers {
		for _, tId := range w.skills {
			// assign(w,t) -> works(w)
			l1 := sat.Literal{false, sat.Atom{pAssign, w.id, tId}}
			l2 := sat.Literal{true, sat.Atom{pWorks, w.id, 0}}
			clauses.AddClause("wrk", l1, l2)
		}
	}
	// At most *nWorkers workers may be used overall.
	lits := make([]sat.Literal, len(workers))
	for i, w := range workers {
		lits[i] = sat.Literal{true, sat.Atom{pWorks, w.id, 0}}
	}
	clauses.AddClauseSet(sat.CreateCardinality("cWo", lits, *nWorkers, sorters.AtMost))
	// intersections on the timeline: two ways to do it
	// 1) list all intersecting tasks
	// 2) find maximal cliques in the interval graph, and post for that
	for _, w := range workers {
		ts := make([]Task, len(w.skills))
		for i, s := range w.skills {
			ts[i] = tasks[s]
		}
		sort.Sort(ByStart(ts))
		switch *typeIntersect {
		case "simple":
			// Pairwise: forbid assigning both tasks whose intervals overlap.
			for i, t1 := range ts {
				for j := i + 1; j < len(ts); j++ {
					t2 := ts[j]
					if t2.start < t1.end {
						l1 := sat.Literal{false, sat.Atom{pAssign, w.id, t1.id}}
						l2 := sat.Literal{false, sat.Atom{pAssign, w.id, t2.id}}
						clauses.AddClause("isc1", l1, l2)
					}
				}
			}
		case "clique":
			// find the maximal cliques in the interval graph and pose AMO on them
			clique := make([]Task, 0)
			for _, t := range ts {
				sort.Sort(ByEnd(clique)) //todo: use a priority queue, e.g. heap
				//first one is earliest end time
				if len(clique) > 0 && clique[0].end <= t.start {
					// max clique reached
					//output the maximal clique!
					if len(clique) > 1 {
						lits := make([]sat.Literal, len(clique))
						for i, c := range clique {
							lits[i] = sat.Literal{true, sat.Atom{pAssign, w.id, c.id}}
							fmt.Print(c.id, "(", c.start, ",", c.end, ") ")
						}
						fmt.Println()
						//fmt.Println("clique", w.id, lits)
						clauses.AddClauseSet(sat.CreateCardinality("cli", lits, 1, sorters.AtMost))
					}
					//start removing elements:
					// drop every task that ends before t starts.
					for len(clique) > 0 && clique[0].end <= t.start {
						clique = clique[1:]
					}
				}
				clique = append(clique, t)
			}
			// Flush the final clique after the sweep.
			if len(clique) > 1 {
				lits := make([]sat.Literal, len(clique))
				for i, c := range clique {
					lits[i] = sat.Literal{true, sat.Atom{pAssign, w.id, c.id}}
				}
				//fmt.Println("clique", w.id, lits)
				clauses.AddClauseSet(sat.CreateCardinality("cli", lits, 1, sorters.AtMost))
			}
		default:
			panic("Type not implemented")
		}
	}
	// NOTE(review): len(clauses) implies ClauseSet is a slice type here;
	// other call sites in this codebase use clauses.Size() — verify.
	g := sat.IdGenerator(len(clauses) * 7)
	g.GenerateIds(clauses)
	//g.Filename = strings.Split(*f, ".")[0] + ".cnf"
	//g.Filename = *out
	if *dbg {
		g.PrintDebug(clauses)
	} else {
		g.PrintClausesDIMACS(clauses)
	}
}
// main reads an instance of lines "id1 id2 coefficient": diagonal
// entries (id1 == id2) contribute the coefficient directly on x_id1,
// off-diagonal entries get a fresh Tseitin "and" variable for the
// product x_id1 * x_id2. It then either prints a gringo program
// (--gringo) or solves the resulting optimization threshold.
func main() {
	glob.Init()
	input, err2 := os.Open(*glob.Filename_flag)
	defer input.Close()
	if err2 != nil {
		panic("Could not read file")
		return
	}
	scanner := bufio.NewScanner(input)
	buf := make([]byte, 0, 64*1024)
	scanner.Buffer(buf, 1024*1024)
	state := 0 // 0: read size, 1: read graph 1, 2: read graph 2
	vars := 0
	orig_vars := 0
	size := 0
	i := 0
	var entries []entry
	for scanner.Scan() {
		l := strings.Trim(scanner.Text(), " ")
		// Skip blank lines and % / * comment lines.
		if l == "" || strings.HasPrefix(l, "%") || strings.HasPrefix(l, "*") {
			continue
		}
		elements := strings.Fields(l)
		var b error
		switch state {
		case 0: // deprecated: for parsing the "header" of pb files, now parser is flexible
			{
				// Header: <#vars> <#entries>.
				vars, b = strconv.Atoi(elements[0])
				if b != nil {
					panic("bad conversion of numbers")
				}
				orig_vars = vars
				size, b = strconv.Atoi(elements[1])
				if b != nil {
					panic("bad conversion of numbers")
				}
				entries = make([]entry, size)
				state = 1
			}
		case 1:
			{
				// Body line: id1 id2 coefficient (coefficient parsed as float,
				// truncated to int64).
				entries[i].id1, b = strconv.Atoi(elements[0])
				if b != nil {
					panic("bad conversion of numbers")
				}
				entries[i].id2, b = strconv.Atoi(elements[1])
				if b != nil {
					panic("bad conversion of numbers")
				}
				var f float64
				f, b = strconv.ParseFloat(elements[2], 64)
				if b != nil {
					panic("bad conversion of numbers")
				}
				entries[i].c = int64(f)
				// Off-diagonal entry: allocate a fresh product variable.
				if entries[i].id1 != entries[i].id2 {
					vars++
					entries[i].and = vars
				}
				i++
			}
		}
	}
	var clauses sat.ClauseSet
	var opt constraints.Threshold
	opt.Typ = constraints.OPT
	lits := make([]sat.Literal, vars+1)
	primaryVars := make(map[string]bool, 0)
	for i := 0; i <= vars; i++ {
		primaryVars[sat.NewAtomP1(sat.Pred("x"), i).Id()] = true
	}
	for i, _ := range lits {
		lits[i] = sat.Literal{true, sat.NewAtomP1(sat.Pred("x"), i)}
	}
	for _, e := range entries {
		if e.id1 == e.id2 {
			// Diagonal: coefficient sits directly on x_id1.
			opt.Entries = append(opt.Entries, constraints.Entry{lits[e.id1], int64(e.c)})
		} else {
			// Tseitin: and <-> (x_id1 /\ x_id2).
			clauses.AddClause(sat.Neg(lits[e.id1]), sat.Neg(lits[e.id2]), lits[e.and])
			clauses.AddClause(lits[e.id1], sat.Neg(lits[e.and]))
			clauses.AddClause(lits[e.id2], sat.Neg(lits[e.and]))
			opt.Entries = append(opt.Entries,
				constraints.Entry{lits[e.and], int64(e.c)})
		}
	}
	if *glob.Gringo_flag {
		// Emit the instance as an ASP (gringo) program instead of solving.
		for i := 0; i <= orig_vars; i++ {
			fmt.Println("{x(", i, ")}.")
		}
		for _, e := range entries {
			if e.id1 != e.id2 {
				fmt.Println(lits[e.and].ToTxt(), ":-", lits[e.id1].ToTxt(), ",", lits[e.id2].ToTxt(), ".")
			}
		}
		opt.PrintGringo()
		return
	}
	g := sat.IdGenerator(clauses.Size()*7 + 1)
	g.PrimaryVars = primaryVars
	opt.NormalizePositiveCoefficients()
	opt.Offset = opt.K
	// opt.PrintGringo()
	// clauses.PrintDebug()
	glob.D("offset", opt.Offset)
	glob.A(opt.Positive(), "opt only has positive coefficients")
	g.Solve(clauses, &opt, *glob.Opt_bound_flag, -opt.Offset)
}
func TestAtMostOne(test *testing.T) { glob.D("TestTranslateAtMostOne") k := 6 lits := make([]sat.Literal, k) atoms := make(map[string]bool) for i, _ := range lits { lits[i] = sat.Literal{true, sat.NewAtomP1(sat.Pred("x"), i)} atoms[lits[i].A.Id()] = true } t := TranslateAtMostOne(Naive, "naive", lits) if t.Clauses.Size() == 0 { test.Fail() } t = TranslateAtMostOne(Split, "split", lits) if t.Clauses.Size() == 0 { test.Fail() } // t = TranslateAtMostOne(Sort, "sorter", lits) // if t.Clauses.Size() == 0 { // test.Fail() // } t = TranslateAtMostOne(Heule, "heule", lits) if t.Clauses.Size() == 0 { test.Fail() } t = TranslateAtMostOne(Log, "Log", lits) if t.Clauses.Size() == 0 { test.Fail() } //fmt.Println() t = TranslateAtMostOne(Count, "counter", lits) if t.Clauses.Size() == 0 { test.Fail() } //t.Clauses.PrintDebug() //g := sat.IdGenerator(t.Clauses.Size() * 7) //g.Filename = "out.cnf" //g.PrimaryVars = atoms //g.Solve(t.Clauses) //g.PrintSymbolTable("sym.txt") //fmt.Println() t = TranslateExactlyOne(Naive, "naive", lits) if t.Clauses.Size() == 0 { test.Fail() } t = TranslateExactlyOne(Split, "split", lits) if t.Clauses.Size() == 0 { test.Fail() } t = TranslateExactlyOne(Count, "counter", lits) if t.Clauses.Size() == 0 { test.Fail() } //t = TranslateExactlyOne(Sort, "sorter", lits) //if t.Clauses.Size() == 0 { // test.Fail() //} t = TranslateExactlyOne(Heule, "heule", lits) if t.Clauses.Size() == 0 { test.Fail() } t = TranslateExactlyOne(Log, "Log", lits) if t.Clauses.Size() == 0 { test.Fail() } }
// TranslateAtMostOne translates "at most one of lits is true" into CNF
// using the requested encoding. Auxiliary literals introduced by the
// encoding are collected in trans.Aux; the clauses in trans.Clauses.
func TranslateAtMostOne(typ OneTranslationType, tag string, lits []sat.Literal) (trans CardTranslation) {
	var clauses sat.ClauseSet
	switch typ {
	case Naive:
		// Quadratic pairwise encoding: -x_i \/ -x_j for all i < j.
		for i, l := range lits {
			for j := i + 1; j < len(lits); j++ {
				clauses.AddTaggedClause(tag, sat.Neg(l), sat.Neg(lits[j]))
			}
		}
	case Split:
		// a constant that should be exposed,
		// its the cuttoff for the split method of atMostOne
		cutoff := 5
		if len(lits) <= cutoff {
			return TranslateAtMostOne(Naive, tag, lits)
		} else {
			// Commander variable: aux true forbids the first half,
			// aux false forbids the second half; recurse on both halves.
			aux := sat.NewAtomP1(sat.Pred("split"), newId())
			trans.Aux = append(trans.Aux, sat.Literal{true, aux})
			for _, l := range lits[:len(lits)/2] {
				clauses.AddTaggedClause(tag, sat.Literal{true, aux}, sat.Neg(l))
			}
			for _, l := range lits[len(lits)/2:] {
				clauses.AddTaggedClause(tag, sat.Literal{false, aux}, sat.Neg(l))
			}
			clauses.AddClauseSet(TranslateAtMostOne(typ, tag, lits[:len(lits)/2]).Clauses)
			clauses.AddClauseSet(TranslateAtMostOne(typ, tag, lits[len(lits)/2:]).Clauses)
		}
	case Count:
		// Sequential counter: S_i means "some x_j with j <= i is true".
		pred := sat.Pred("c")
		counterId := newId()
		auxs := make([]sat.Literal, len(lits))
		for i, _ := range auxs {
			auxs[i] = sat.Literal{true, sat.NewAtomP2(pred, counterId, i)}
		}
		trans.Aux = auxs
		// S_i -> S_{i-1}
		for i := 1; i < len(lits); i++ {
			clauses.AddTaggedClause(tag, auxs[i-1], sat.Neg(auxs[i]))
		}
		// X_i -> S_i
		for i := 0; i < len(lits); i++ {
			clauses.AddTaggedClause(tag, auxs[i], sat.Neg(lits[i]))
		}
		// X_i-1 -> -S_i
		for i := 1; i < len(lits); i++ {
			clauses.AddTaggedClause(tag, sat.Neg(auxs[i]), sat.Neg(lits[i-1]))
		}
		// (S_i-1 /\ -S_i) -> X_i-1
		for i := 1; i <= len(lits); i++ {
			if i != len(lits) {
				clauses.AddTaggedClause(tag, sat.Neg(auxs[i-1]), auxs[i], lits[i-1])
			} else {
				// Last counter bit has no successor S_i.
				clauses.AddTaggedClause(tag, sat.Neg(auxs[i-1]), lits[i-1])
			}
		}
	case Heule:
		k := 4 // fixed size for the heule encoding
		if len(lits) > k+1 {
			// Take the first k literals plus a fresh commander variable,
			// encode that group naively, and recurse on the rest with the
			// negated commander appended.
			aux := sat.NewAtomP1(sat.Pred("heule"), newId())
			trans.Aux = append(trans.Aux, sat.Literal{true, aux})
			front := make([]sat.Literal, k+1)
			copy(front, lits[:k])
			front[k] = sat.Literal{true, aux}
			trans2 :=
				TranslateAtMostOne(Naive, tag, front)
			clauses.AddClauseSet(trans2.Clauses)
			back := make([]sat.Literal, len(lits)-k+1)
			copy(back, lits[k:])
			back[len(lits)-k] = sat.Literal{false, aux}
			trans2 = TranslateAtMostOne(typ, tag, back)
			trans.Aux = append(trans.Aux, trans2.Aux...)
			clauses.AddClauseSet(trans2.Clauses)
		} else {
			// Small enough: naive encoding suffices.
			trans2 := TranslateAtMostOne(Naive, tag, lits)
			clauses.AddClauseSet(trans2.Clauses)
		}
	case Log:
		cutoff := 5 //will be a parameter of this encoding
		clauses = buildLogEncoding(sat.Pred("logE"), newId(), cutoff, 0, tag, lits)
	case Sort:
		panic("CNF translation for this type not implemented yet")
	default:
		panic("CNF translation for this type not implemented yet")
	}
	trans.Typ = typ
	trans.Clauses = clauses
	return
}
// TranslateExactlyOne translates "exactly one of lits is true" into
// CNF: for most encodings, the corresponding at-most-one translation
// plus one at-least-one clause; the sequential counter instead adds
// the unit clause S_0 (first counter bit must be true).
func TranslateExactlyOne(typ OneTranslationType, tag string, lits []sat.Literal) (trans CardTranslation) {
	var clauses sat.ClauseSet
	switch typ {
	case Heule, Log, Naive, Split:
		// AMO encoding + single at-least-one clause over all literals.
		trans2 := TranslateAtMostOne(typ, tag, lits)
		trans.Aux = append(trans.Aux, trans2.Aux...)
		clauses.AddClauseSet(trans2.Clauses)
		clauses.AddTaggedClause(tag, lits...)
	case Count:
		// Sequential counter: S_i means "some x_j with j <= i is true".
		pred := sat.Pred("cx")
		counterId := newId()
		auxs := make([]sat.Literal, len(lits))
		for i, _ := range auxs {
			auxs[i] = sat.Literal{true, sat.NewAtomP2(pred, counterId, i)}
		}
		trans.Aux = auxs
		// S_i -> S_{i-1}
		for i := 1; i < len(lits); i++ {
			clauses.AddTaggedClause(tag, auxs[i-1], sat.Neg(auxs[i]))
		}
		// X_i -> S_i
		for i := 0; i < len(lits); i++ {
			clauses.AddTaggedClause(tag, auxs[i], sat.Neg(lits[i]))
		}
		// X_i-1 -> -S_i
		for i := 1; i < len(lits); i++ {
			clauses.AddTaggedClause(tag, sat.Neg(auxs[i]), sat.Neg(lits[i-1]))
		}
		// (S_i-1 /\ -S_i) -> X_i-1
		for i := 1; i <= len(lits); i++ {
			if i != len(lits) {
				clauses.AddTaggedClause(tag, sat.Neg(auxs[i-1]), auxs[i], lits[i-1])
			} else {
				// Last counter bit has no successor S_i.
				clauses.AddTaggedClause(tag, sat.Neg(auxs[i-1]), lits[i-1])
			}
		}
		// Exactly-one: force the first counter bit, i.e. at least one x_i.
		clauses.AddTaggedClause(tag, auxs[0])
	case Sort:
		panic("CNF translation for this type not implemented yet")
	default:
		panic("CNF translation for this type not implemented yet")
	}
	trans.Typ = typ
	trans.Clauses = clauses
	return
}
// returns list of *pb; first one is optimization statement, possibly empty
//
// parse reads an OPB-style file: an optional "min:" objective line and
// one pseudo-Boolean constraint per subsequent line, each a sequence
// of (weight, variable) pairs followed by a relation (<=, >=, =, ==),
// a threshold and ";". Lines starting with % or * are comments. Index
// 0 of the result is always the (possibly empty) optimization
// statement, and pb.Id equals each constraint's position.
func parse(filename string) (pbs []*constraints.Threshold, err error) {
	input, err2 := os.Open(filename)
	defer input.Close()
	if err2 != nil {
		err = errors.New("Please specify correct path to instance. Does not exist")
		return
	}
	scanner := bufio.NewScanner(input)
	buf := make([]byte, 0, 64*1024)
	scanner.Buffer(buf, 1024*1024)
	// 0 : first line, 1 : rest of the lines
	var count int
	state := 1
	t := 0 // number of thresholds scanned so far (== next Id)
	pbs = make([]*constraints.Threshold, 0)
	for scanner.Scan() {
		l := strings.Trim(scanner.Text(), " ")
		if l == "" || strings.HasPrefix(l, "%") || strings.HasPrefix(l, "*") {
			continue
		}
		elements := strings.Fields(l)
		if len(elements) == 1 { // quick hack to ignore single element lines (not necessary)
			continue
		}
		switch state {
		case 0: // deprecated: for parsing the "header" of pb files, now parser is flexible
			{
				glob.D(l)
				var b1 error
				count, b1 = strconv.Atoi(elements[4])
				vars, b2 := strconv.Atoi(elements[2])
				if b1 != nil || b2 != nil {
					glob.D("cant convert to threshold:", l)
					panic("bad conversion of numbers")
				}
				glob.D("File PB file with", count, "constraints and", vars, "variables")
				state = 1
			}
		case 1:
			{
				var n int  // number of entries
				var f int  // index of entry
				var o bool //optimization
				var pb constraints.Threshold
				// offset_back compensates for ";" being glued to the last
				// token instead of standing alone.
				offset_back := 0
				if elements[len(elements)-1] != ";" {
					offset_back = 1
				}
				if elements[0] == "min:" || elements[0] == "Min" {
					o = true
					n = (len(elements) + offset_back - 2) / 2
					f = 1
				} else {
					o = false
					n = (len(elements) + offset_back - 3) / 2
					f = 0
				}
				pb.Entries = make([]constraints.Entry, n)
				// Entries come in (weight, variable) token pairs; the inner
				// i++ consumes the variable token of each pair.
				for i := f; i < 2*n; i++ {
					weight, b1 := strconv.ParseInt(elements[i], 10, 64)
					i++
					if b1 != nil {
						glob.D("cant convert to threshold:", elements[i], "\nin PB\n", l)
						panic("bad conversion of numbers")
					}
					atom := sat.NewAtomP(sat.Pred(elements[i]))
					pb.Entries[(i-f)/2] = constraints.Entry{sat.Literal{true, atom}, weight}
				}
				// fake empty opt in case it does not exist
				if t == 0 && !o {
					pbs = append(pbs,
						&constraints.Threshold{})
					t++
				}
				pb.Id = t
				if o {
					pb.Typ = constraints.OPT
					glob.D("Scanned optimization statement")
				} else {
					// Trailing tokens: <relation> <K> [";"].
					pb.K, err = strconv.ParseInt(elements[len(elements)-2+offset_back], 10, 64)
					if err != nil {
						glob.A(false, " cant parse threshold, error", err.Error(), pb.K)
					}
					typS := elements[len(elements)-3+offset_back]
					if typS == ">=" {
						pb.Typ = constraints.GE
					} else if typS == "<=" {
						pb.Typ = constraints.LE
					} else if typS == "==" || typS == "=" {
						pb.Typ = constraints.EQ
					} else {
						glob.A(false, "cant convert to threshold, equationtype typS:", typS)
					}
				}
				pbs = append(pbs, &pb)
				t++
				//fmt.Println(pb.Id)
				//pb.Print10()
			}
		}
	}
	glob.A(len(pbs) == t, "Id of constraint must correspond to position")
	glob.D("Scanned", t-1, "PB constraints.")
	if len(pbs) > 0 && !pbs[0].Empty() {
		glob.D("Scanned OPT statement.")
	}
	return
}