func TestSGD_ChooseManyEven(t *testing.T) {
	batchsize = 128
	samplesize := 32000
	source := rand.NewSource(1337)
	pRNG := rand.New(source)
	sgd := NewSGD(samplesize, pRNG)
	batches := sgd.ChooseMany(samplesize / batchsize)
	if len(batches) != samplesize/batchsize {
		t.Errorf("Expected %v Got %v For batchsize = %v and samplesize = %v",
			samplesize/batchsize, len(batches), batchsize, samplesize)
	}
	var samples sort.IntSlice
	for _, batch := range batches {
		if len(batch) < batchsize-1 || len(batch) > batchsize+1 {
			t.Errorf("Expected %v or %v Got %v For numbatches = %v and samplesize = %v",
				batchsize, batchsize-1, len(batch), samplesize/batchsize, samplesize)
		}
		for _, sample := range batch {
			samples = append(samples, sample)
		}
	}
	samples.Sort()
	for idx := range samples {
		if idx != samples[idx] {
			t.Errorf("Missing sample %v in batches", idx)
		}
	}
}
// primaryKeys returns the lowercase names of the fields tagged with `pk`,
// ordered by the numeric value of the tag.
func primaryKeys(s reflect.Type) ([]string, error) {
	pk := make(map[int]string)
	var pkNum sort.IntSlice
	for i := 0; i < s.NumField(); i++ {
		field := s.Field(i)
		pkTag := field.Tag.Get("pk")
		if pkTag != "" {
			num, err := strconv.ParseInt(pkTag, 10, 8)
			if err != nil {
				return nil, err
			}
			fieldName := strings.ToLower(field.Name)
			n := int(num)
			if v, exist := pk[n]; exist {
				err = errors.New("key number conflict: field " + v + " with " + fieldName)
				return nil, err
			}
			pk[n] = fieldName
			pkNum = append(pkNum, n)
		}
	}
	n := len(pk)
	if n == 0 {
		return nil, errors.New("struct doesn't have a primary key")
	}
	pkNum.Sort()
	result := make([]string, n)
	for i, v := range pkNum {
		result[i] = pk[v]
	}
	return result, nil
}
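// Usage sketch for primaryKeys. This function and the account struct are
// illustrative only and not part of the original code: fields tagged with
// `pk:"N"` come back lowercased, ordered by N.
func examplePrimaryKeys() {
	type account struct {
		Region string `pk:"2"`
		ID     int64  `pk:"1"`
		Name   string // untagged fields are ignored
	}
	keys, err := primaryKeys(reflect.TypeOf(account{}))
	fmt.Println(keys, err) // [id region] <nil>
}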
// convertIntList converts an intList expression into a sorted list of unique
// integers
func convertIntList(list jsonParser.IntList) ([]int, error) {
	intMap := make(map[int]bool)
	for _, r := range list {
		if strings.Contains(r, ":") {
			rangeList, err := ConvertRangeToList(r)
			if err != nil {
				return nil, err
			}
			for _, i := range rangeList {
				intMap[i] = true
			}
		} else {
			i, err := strconv.Atoi(r)
			if err != nil {
				return nil, fmt.Errorf("cannot convert range value %s into integer", r)
			}
			intMap[i] = true
		}
	}
	var outList sort.IntSlice
	for k := range intMap {
		outList = append(outList, k)
	}
	outList.Sort()
	return outList, nil
}
// composeResponseMessages takes the ResponseErrors (if any) and creates ResponseMessages from them.
func composeResponseMessages(route restful.Route, decl *ApiDeclaration) (messages []ResponseMessage) {
	if route.ResponseErrors == nil {
		return messages
	}
	// sort by code
	codes := sort.IntSlice{}
	for code := range route.ResponseErrors {
		codes = append(codes, code)
	}
	codes.Sort()
	for _, code := range codes {
		each := route.ResponseErrors[code]
		message := ResponseMessage{
			Code:    code,
			Message: each.Message,
		}
		if each.Model != nil {
			st := reflect.TypeOf(each.Model)
			isCollection, st := detectCollectionType(st)
			modelName := modelBuilder{}.keyFrom(st)
			if isCollection {
				modelName = "array[" + modelName + "]"
			}
			// reference the model
			modelBuilder{Models: &decl.Models}.addModel(st, "")
			message.ResponseModel = modelName
		}
		messages = append(messages, message)
	}
	return
}
// verifySortedOrder checks that forward and reverse iteration over the map
// visit the inserted numbers in sorted and reverse-sorted order respectively.
func verifySortedOrder(t *testing.T, msg string, rbt *LinkedOrderedMap, insertedNums sort.IntSlice) bool {
	var sortedNums sort.IntSlice
	sortedNums = append(sortedNums, insertedNums...)
	sortedNums.Sort()
	i := 0
	for it := rbt.Iterator(); it.IsValid(); it.Next() {
		if sortedNums[i] != it.Value().(int) {
			t.Errorf("%s. Ordered iteration %d: Expecting %d but gets %d", msg, i, sortedNums[i], it.Value().(int))
			return false
		}
		i++
	}
	i = len(sortedNums) - 1
	for it := rbt.ReverseIterator(); it.IsValid(); it.Next() {
		if sortedNums[i] != it.Value().(int) {
			t.Errorf("%s. Reverse ordered iteration %d: Expecting %d but gets %d", msg, i, sortedNums[i], it.Value().(int))
			return false
		}
		i--
	}
	return true
}
func TestHeap1(t *testing.T) {
	for tests := 0; tests < 10000; tests++ {
		var heap Heap
		var data sort.IntSlice
		var length = rand.Intn(50)
		var key int
		for i := 0; i < length; i++ {
			key = rand.Intn(100)
			heap.Insert(key, key)
			data = append(data, key)
		}
		data.Sort()
		for _, v := range data {
			if heap.IsEmpty() {
				t.Errorf("Heap unexpectedly empty")
			}
			key := heap.Extract().(int)
			if key != v {
				t.Errorf("Keys are not equal %v %v", key, v)
			}
		}
		if !heap.IsEmpty() {
			t.Errorf("Heap not empty")
		}
	}
}
func (e *mongoEngine) getPermIds(resString string, perms []string, create bool) (sort.IntSlice, error) {
	var ids sort.IntSlice
	var err error
	for _, p := range perms {
		id, exist := e.GetPerm(p, resString, create)
		if !exist {
			err = errs.ErrPermNotExist
		}
		ids = append(ids, id)
	}
	ids.Sort()
	return ids, err
}
// Prefs returns sorted preferences
func (list *MXlist) Prefs() []uint16 {
	prefs := sort.IntSlice{}
	for pref := range *list {
		prefs = append(prefs, int(pref))
	}
	prefs.Sort()
	ret := []uint16{}
	for _, pref := range prefs {
		ret = append(ret, uint16(pref))
	}
	return ret
}
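// Illustrative sketch only: MXlist's declaration is not part of this excerpt.
// Prefs ranges over the map keys and round-trips them through int, so a
// plausible (hypothetical) shape and usage would be:
//
//	type MXlist map[uint16][]string // preference -> mail exchanger hosts
//
//	list := MXlist{20: {"mx2.example.com"}, 10: {"mx1.example.com"}}
//	fmt.Println(list.Prefs()) // [10 20]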
func TestHeap2(t *testing.T) {
	for tests := 0; tests < 10000; tests++ {
		var heap Heap
		var data sort.IntSlice
		var length = rand.Intn(50)
		var key int
		var inserted = 0
		for i := 0; i < length; i++ {
			var insert = rand.Intn(10) >= 5
			if insert {
				key = rand.Intn(100)
				heap.Insert(key, key)
				data = append(data, key)
				inserted++
			} else {
				if inserted == 0 {
					continue
				}
				key = heap.Extract().(int)
				data.Sort()
				if key != data[0] {
					t.Errorf("Keys are not equal %v %v", key, data[0])
				}
				data = data[1:]
				inserted--
			}
		}
		data.Sort()
		for _, v := range data {
			if heap.IsEmpty() {
				t.Errorf("Heap is unexpectedly empty, removing")
			}
			key = heap.Extract().(int)
			if key != v {
				t.Errorf("Keys are not equal %v %v", key, v)
			}
			inserted--
		}
		if !heap.IsEmpty() {
			t.Errorf("Heap not empty")
		}
		if inserted != 0 {
			t.Errorf("inserted not 0")
		}
	}
}
func GetWords(ctx *gin.Context) {
	count := 40
	level := ctx.Params.ByName("level")
	switch level {
	case "1":
		count = 25
	case "3":
		count = 60
	default:
	}
	maper := make(map[int]int)
	arr_id := sort.IntSlice{}
	rander := rand.New(rand.NewSource(time.Now().Unix()))
	for len(maper) < count {
		v := rander.Intn(1709)
		maper[v] = v
	}
	for k := range maper {
		arr_id = append(arr_id, k)
	}
	arr_id.Sort()
	sqlstr := "select word,interpretation from word where id in (" + getsql(count) + ")"
	var words []db.Word
	var args []interface{}
	for _, v := range arr_id {
		args = append(args, v)
	}
	if _, err := db.DB.Select(&words, sqlstr, args...); err != nil {
		ctx.String(200, err.Error()+sqlstr)
		// ctx.HTML(http.StatusBadRequest, "main", err)
		return
	}
	ctx.HTML(http.StatusOK, "main", words)
}
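// getsql is not shown in this excerpt. Judging only from the call site above
// (an IN clause that receives len(arr_id) bind arguments), it presumably
// produces count comma-separated placeholders. A minimal sketch under that
// assumption, given the hypothetical name getsqlSketch so it does not clash
// with the real helper:
func getsqlSketch(count int) string {
	placeholders := make([]string, count)
	for i := range placeholders {
		placeholders[i] = "?" // or the positional placeholder of the SQL dialect in use
	}
	return strings.Join(placeholders, ",")
}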
// state 2 - Build and train test set
func interactiveBuildTrainAndTestSet() {
	var (
		err         error
		inputString string
		inputInt    int
	)

	// STEP 1:
	// Begin building training and test set
	fmt.Println("Building train and test set")
	inputString = promptString("filename", "What file would you like to split?")
	debugMsg("Opening file: %s", inputString)
	// Open the file for reading
	dataFile, err := os.Open(inputString)
	errCheck(err)
	// We do not need this file after, so close it upon leaving this method
	defer dataFile.Close()
	// Create a buffered reader for the file
	dataReader := bufio.NewReader(dataFile)
	var line string

	// STEP 2:
	// Create a map for storing the temporary files
	tempFileMap := map[string]*os.File{}
	countMap := map[string]int{}
	var exists bool       // For checking if element exists
	var tempFile *os.File // Placeholder for the temporary file which is to be put in the map.
	for line, err = dataReader.ReadString('\n'); // read line by line
	err == nil;                                  // stop on error
	line, err = dataReader.ReadString('\n') {
		// Take each instance and write it to a label specific file
		line = strings.Trim(line, "\n")
		feature := strings.Split(line, ",")
		label := feature[len(feature)-1]
		tempFile, exists = tempFileMap[label]
		countMap[label]++
		if exists {
			// Write to the file
			_, err = tempFile.WriteString(line + "\n")
			errCheck(err)
		} else {
			// Create the file and write the line
			tempFileName := dataFile.Name() + "." + label + ".tmp"
			debugMsg("Creating temporary file: %s", tempFileName)
			tempFile, err := os.OpenFile(
				tempFileName,
				os.O_CREATE|os.O_WRONLY|os.O_TRUNC,
				0666)
			errCheck(err)
			tempFileMap[label] = tempFile
			defer tempFile.Close()
			defer os.Remove(tempFileName)
			_, err = tempFile.WriteString(line + "\n")
			errCheck(err)
		}
	}

	// Close and re-open the files as readable
	debugMsg("Closing all temporary files for writing. Re-opening as read-only.")
	for k, v := range tempFileMap {
		fileName := v.Name()
		v.Close()
		tempFileMap[k], err = os.Open(fileName)
		errCheck(err)
		// We do not need this file after, so close it upon leaving this method
		defer tempFileMap[k].Close()
	}

	// STEP 3:
	// Receive the number of each label (class) we'd like to add to the
	// training set.
	// Hold the amount of each label we'd like in the training set in a map
	trainCountMap := map[string]int{}
	fmt.Println("Please enter the number of each type of label you'd",
		"like in the training set.")
	// Ask user how much of each label they want and put it in a map
	// trainCountMap
	for k, v := range countMap {
		inputInt = promptInt(k, "label: %s max: %d", k, v)
		trainCountMap[k] = inputInt
	}

	debugMsg("Creating: %s", dataFile.Name()+".train")
	// Open a file for writing training data
	trainFile, err := os.OpenFile(
		dataFile.Name()+".train",
		os.O_CREATE|os.O_WRONLY|os.O_TRUNC,
		0666)
	errCheck(err)
	// We do not need this file after, so close it upon leaving this method
	defer trainFile.Close()

	// STEP 4:
	// Read the correct amount of each label in
	for k, v := range trainCountMap {
		debugMsg("label: %s count: %d", k, v)
		dataReader := bufio.NewReader(tempFileMap[k])
		// Open a file for writing testing data
		testFile, err := os.OpenFile(
			dataFile.Name()+"."+k+".test",
			os.O_CREATE|os.O_WRONLY|os.O_TRUNC,
			0666)
		errCheck(err)
		// We do not need this file after, so close it upon leaving this method
		defer testFile.Close()
		if v > 0 {
			// Generate a random permutation
			var randomized sort.IntSlice
			randomized = rand.Perm(countMap[k])
			// use a slice of the first /v/ of them
			randomized = randomized[0:v]
			// sort the ints so that as we iterate through each instance we can
			// easily find the next one we need to export
			randomized.Sort()
			// Read through the file, writing the included instances to
			// .train and the others to .test
			lineCount := 0
			if len(randomized) > 0 {
				for line, err = dataReader.ReadString('\n'); // read line by line
				err == nil;                                  // stop on error
				line, err = dataReader.ReadString('\n') {
					if lineCount == randomized[0] {
						_, err = trainFile.WriteString(line)
						errCheck(err)
						if len(randomized) > 1 {
							randomized = randomized[1:]
						} else {
							randomized[0] = -1 // skip the rest
						}
					} else {
						_, err = testFile.WriteString(line)
						errCheck(err)
					}
					lineCount++
				}
			}
		} else {
			//TODO: Add a handler for -1
			// None of this label was requested in the training set, so dump it all to .test
			for line, err = dataReader.ReadString('\n'); // read line by line
			err == nil;                                  // stop on error
			line, err = dataReader.ReadString('\n') {
				_, err = testFile.WriteString(line)
				errCheck(err)
			}
		}
		testFile.Close()
	}
	fmt.Println()
}
func (f *File) saveUsingXrefStream() error {
	info, err := os.Stat(f.filename)
	if err != nil {
		return err
	}
	file, err := os.OpenFile(f.filename, os.O_RDWR|os.O_APPEND, 0666)
	if err != nil {
		return err
	}
	defer func() {
		err := file.Close()
		if err != nil {
			panic(err)
		}
	}()

	offset := info.Size() + 1
	n, err := writeLineBreakTo(file)
	if err != nil {
		return err
	}
	offset += n

	xrefs := map[Integer]crossReference{}
	xrefs[0] = crossReference{0, 0, 65535}
	free := sort.IntSlice{0}

	for i := range f.objects {
		switch typed := f.objects[i].(type) {
		case crossReference:
			// no-op, don't need to write unchanged objects to file
			// however, we do need to handle the free list
			// xrefs[Integer(i)] = typed
			if typed[0] == 0 {
				free = append(free, int(i))
			}
		case IndirectObject:
			xrefs[Integer(i)] = crossReference{1, uint(offset - 1), typed.GenerationNumber}
			n, err = typed.writeTo(file)
			if err != nil {
				return err
			}
			offset += n
			n, err = writeLineBreakTo(file)
			if err != nil {
				return err
			}
			offset += n
		case freeObject:
			xrefs[Integer(i)] = crossReference{0, 0, uint(typed)}
			free = append(free, int(i))
		default:
			panic(fmt.Sprintf("unhandled type: %T", typed))
		}
	}

	// Figure out the highest object number to set Size properly
	var maxObjNum uint
	for objNum := range f.objects {
		if objNum > maxObjNum {
			maxObjNum = objNum
		}
	}

	// add an xref for the xrefstream
	xrefstreamObjectNumber := uint(maxObjNum + 1)
	maxObjNum++
	xref := crossReference{1, uint(offset - 1), 0}
	xrefs[Integer(xrefstreamObjectNumber)] = xref
	f.objects[xrefstreamObjectNumber] = xref

	// fill in the free linked list
	free.Sort()
	for i := 0; i < free.Len()-1; i++ {
		xref := xrefs[Integer(free[i])]
		xref[1] = uint(free[i+1])
		xrefs[Integer(free[i])] = xref
	}

	objects := make(sort.IntSlice, 0, len(xrefs))
	for objectNumber := range xrefs {
		objects = append(objects, int(objectNumber))
	}
	objects.Sort()

	// group into consecutive sets
	groups := []sort.IntSlice{}
	groupStart := 0
	for i := range objects {
		if i == 0 {
			continue
		}
		if objects[i] != objects[i-1]+1 {
			groups = append(groups, objects[groupStart:i])
			groupStart = i
		}
	}
	groups = append(groups, objects[groupStart:])

	// Create the xrefstream dictionary (the trailer)
	trailer := Dictionary{}
	trailer[Name("Size")] = Integer(maxObjNum + 1)

	// Prev
	if f.prev != 0 {
		trailer[Name("Prev")] = f.prev
	}

	// Root
	trailer[Name("Root")] = f.Root

	// Encrypt
	if len(f.Encrypt) != 0 {
		trailer[Name("Encrypt")] = f.Encrypt
	}

	// Info
	if f.Info.ObjectNumber != 0 {
		trailer[Name("Info")] = f.Info
	}

	// ID
	if len(f.ID) != 0 {
		trailer[Name("ID")] = f.ID
	}

	// Add xrefstream specific things to trailer
	trailer["Type"] = Name("XRef")

	// Index
	index := Array{}
	// fmt.Println(groups)
	for _, group := range groups {
		index = append(index, Integer(group[0]), Integer(len(group)))
	}
	trailer["Index"] = index

	// layout for the stream (W)
	maxXref := [3]uint{}
	for _, xref := range xrefs {
		for i := 0; i < len(xref); i++ {
			if xref[i] > maxXref[i] {
				maxXref[i] = xref[i]
			}
		}
	}
	nBytes := [3]int{}
	for i := range nBytes {
		nBytes[i] = nBytesForInt(int(maxXref[i]))
	}
	trailer["W"] = Array{Integer(nBytes[0]), Integer(nBytes[1]), Integer(nBytes[2])}

	// log.Println(xrefs)
	stream := &bytes.Buffer{}
	for _, group := range groups {
		for _, objectNumber := range group {
			xref := xrefs[Integer(objectNumber)]
			for i := range xref {
				_, err = stream.Write(intToBytes(xref[i], nBytes[i]))
				if err != nil {
					return err
				}
			}
		}
	}

	xrefstream := IndirectObject{
		ObjectReference: ObjectReference{
			ObjectNumber: xrefstreamObjectNumber,
		},
		Object: Stream{
			Dictionary: trailer,
			Stream:     stream.Bytes(),
		},
	}

	_, err = f.Add(xrefstream)
	if err != nil {
		return err
	}

	_, err = xrefstream.writeTo(file)
	if err != nil {
		return err
	}

	fmt.Fprintf(file, "\nstartxref\n%d\n%%%%EOF", offset-1)
	return nil
}
func (f *File) saveUsingXrefTable() error {
	info, err := os.Stat(f.filename)
	if err != nil {
		return err
	}
	file, err := os.OpenFile(f.filename, os.O_WRONLY|os.O_APPEND, 0666)
	if err != nil {
		return err
	}
	defer func() {
		err := file.Close()
		if err != nil {
			panic(err)
		}
	}()

	offset := info.Size() + 1
	n, err := writeLineBreakTo(file)
	if err != nil {
		return err
	}
	offset += n

	xrefs := map[Integer]crossReference{}
	xrefs[0] = crossReference{0, 0, 65535}
	free := sort.IntSlice{}

	for i := range f.objects {
		switch typed := f.objects[i].(type) {
		case crossReference:
			// no-op, don't need to write unchanged objects to file
			// however, we do need to handle the free list
			// xrefs[Integer(i)] = typed
			if typed[0] == 0 {
				free = append(free, int(i))
			}
		case IndirectObject:
			xrefs[Integer(i)] = crossReference{1, uint(offset - 1), typed.GenerationNumber}
			n, err = typed.writeTo(file)
			if err != nil {
				return err
			}
			offset += n
			n, err = writeLineBreakTo(file)
			if err != nil {
				return err
			}
			offset += n
		case freeObject:
			xrefs[Integer(i)] = crossReference{0, 0, uint(typed)}
			free = append(free, int(i))
		default:
			panic(fmt.Sprintf("unhandled type: %T", typed))
		}
	}

	// fill in the free linked list
	free.Sort()
	for i := 0; i < free.Len()-1; i++ {
		xref := xrefs[Integer(free[i])]
		xref[1] = uint(free[i+1])
		xrefs[Integer(free[i])] = xref
	}

	objects := make(sort.IntSlice, 0, len(xrefs))
	for objectNumber := range xrefs {
		objects = append(objects, int(objectNumber))
	}
	objects.Sort()

	// group into consecutive sets
	groups := []sort.IntSlice{}
	groupStart := 0
	for i := range objects {
		if i == 0 {
			continue
		}
		if objects[i] != objects[i-1]+1 {
			groups = append(groups, objects[groupStart:i])
			groupStart = i
		}
	}
	// add remaining group
	groups = append(groups, objects[groupStart:])

	// write as an xref table to file
	fmt.Fprintf(file, "xref\n")
	for _, group := range groups {
		fmt.Fprintf(file, "%d %d\n", group[0], len(group))
		for _, objectNumber := range group {
			xref := xrefs[Integer(objectNumber)]
			fmt.Fprintf(file, "%010d %05d ", xref[1], xref[2])
			switch xref[0] {
			case 0:
				// f entries
				fmt.Fprintf(file, "f\r\n")
			case 1:
				// n entries
				fmt.Fprintf(file, "n\r\n")
			case 2:
				panic("can't be in xref table")
			default:
				panic("unhandled case")
			}
		}
	}

	// Set up the trailer
	fmt.Fprintf(file, "\ntrailer\n")
	trailer := Dictionary{}

	// Size
	// Figure out the highest object number to set Size properly
	var maxObjNum uint
	for objNum := range f.objects {
		if objNum > maxObjNum {
			maxObjNum = objNum
		}
	}
	trailer[Name("Size")] = Integer(maxObjNum + 1)

	// Prev
	if f.prev != 0 {
		trailer[Name("Prev")] = f.prev
	}

	// Root
	trailer[Name("Root")] = f.Root

	// Encrypt
	if len(f.Encrypt) != 0 {
		trailer[Name("Encrypt")] = f.Encrypt
	}

	// Info
	if f.Info.ObjectNumber != 0 {
		trailer[Name("Info")] = f.Info
	}

	// ID
	if len(f.ID) != 0 {
		trailer[Name("ID")] = f.ID
	}

	_, err = trailer.writeTo(file)
	if err != nil {
		return err
	}

	fmt.Fprintf(file, "\nstartxref\n%d\n%%%%EOF", offset-1)
	return nil
}
func main() {
	//Exercise 1.
	var numbers sort.IntSlice = []int{9, 2, 12, 1, 16, 12, 10, 12, 12, 9, 8, 9, 0, 11, 2, 1}
	numbers.Sort() //numbers are sorted and can be used as is.
	result := UniqueInts(numbers)
	fmt.Println("Exercise 1\n", result)

	//Exercise 2.
	irregularMatrix := [][]int{
		{1, 2, 3, 4},
		{5, 6, 7, 8},
		{9, 10, 11},
		{12, 13, 14, 15},
		{16, 17, 18, 19, 20},
	}
	slice := Flatten(irregularMatrix)
	fmt.Println("Exercise 2")
	fmt.Printf("1x%d: %v\n", len(slice), slice)

	//Exercise 3.
	numbers_exer3 := []int{155, 2, 12, 1, 16, 12, 222, 12, 12, 11, 8, 9, 5, 11, 2, 20}
	var column_count = 2
	var fraction int
	var fraction_float float64
	fraction_float = float64(len(numbers_exer3)) / float64(column_count)
	fraction = len(numbers_exer3) / column_count
	if fraction_float != float64(int64(fraction_float)) {
		numbers_exer3 := Make2D_helper(fraction_float, numbers_exer3, column_count)
		numbers_exer3_result := Make2D(numbers_exer3, column_count, fraction+1)
		fmt.Println("Exercise 3", column_count, numbers_exer3_result[column_count:])
	} else {
		numbers_exer3_result := Make2D(numbers_exer3, column_count, fraction)
		fmt.Println("Exercise 3\n", column_count, numbers_exer3_result[column_count:])
	}

	//Exercise 4.
	iniData := []string{
		"#filter substitution",
		"[App]",
		";",
		"; This field specifies your organization's name. This field is recommended,",
		"; but optional.",
		"Vendor=MozillaTest",
		";",
		"; This field specifies your application's name. This field is required.",
		"Name=Simple",
		";",
		"; This field specifies your application's version. This field is required.",
		"Version=0.1",
		";",
		"; This field specifies your application's build ID (timestamp). This field is",
		"; required.",
		"BuildID=20070625",
		";",
		"; This field specifies a compact copyright notice for your application. This",
		"; field is optional.",
		"Copyright=Copyright (c) 2004 Mozilla.org",
		";",
		"; This ID is just an example. Every XUL app ought to have it's own unique ID.",
		"; You can use the microsoft 'guidgen' or 'uuidgen' tools, or go on",
		"; irc.mozilla.org and /msg botbot uuid. This field is optional.",
		"ID={3aea3f07-ffe3-4060-bb03-bff3a5365e90}",
		"",
		"[Gecko]",
		";",
		"; This field is required. It specifies the minimum Gecko version that this",
		"; application requires.",
		"MinVersion=@MOZILLA_VERSION_U@",
		";",
		"; This field is optional. It specifies the maximum Gecko version that this",
		"; application requires. It should be specified if your application uses",
		"; unfrozen interfaces.",
		"MaxVersion=@MOZILLA_VERSION_U@",
		"",
		"[Shell]",
		";",
		"; This field specifies the location of your application's main icon with file",
		"; extension excluded. NOTE: Unix style file separators are required. This",
		"; field is optional.",
		"Icon=chrome/icons/default/simple",
	}
	fmt.Println("Exercise 4,5")

	//Exercise 5
	// PrintIni()
	map_data := ParseIni(iniData)
	PrintIni(map_data)
}