Example #1
func (this *TAppDecTop) xFlushOutput(pcListPic *list.List) {
	if pcListPic == nil {
		return
	}

	//fmt.Printf("list len=%d\n", pcListPic.Len());

	for e := pcListPic.Front(); e != nil; e = e.Next() {
		pcPic := e.Value.(*TLibCommon.TComPic)
		if pcPic.GetOutputMark() {
			// write to file
			if this.m_pchReconFile != "" {
				conf := pcPic.GetConformanceWindow()
				var defDisp *TLibCommon.Window
				if this.m_respectDefDispWindow != 0 {
					defDisp = pcPic.GetDefDisplayWindow()
				} else {
					defDisp = TLibCommon.NewWindow()
				}

				this.m_cTVideoIOYuvReconFile.Write(pcPic.GetPicYuvRec(),
					conf.GetWindowLeftOffset()+defDisp.GetWindowLeftOffset(),
					conf.GetWindowRightOffset()+defDisp.GetWindowRightOffset(),
					conf.GetWindowTopOffset()+defDisp.GetWindowTopOffset(),
					conf.GetWindowBottomOffset()+defDisp.GetWindowBottomOffset())
			}

			// update POC of display order
			this.m_iPOCLastDisplay = int(pcPic.GetPOC())
			//fmt.Printf("m_iPOCLastDisplay=%d\n",this.m_iPOCLastDisplay);

			// erase non-referenced picture in the reference picture list after display
			if !pcPic.GetSlice(0).IsReferenced() && pcPic.GetReconMark() {
				//#if !DYN_REF_FREE
				pcPic.SetReconMark(false)

				// mark it should be extended later
				pcPic.GetPicYuvRec().SetBorderExtension(false)

				//#else
				//        pcPic->destroy();
				//        pcListPic->erase( iterPic );
				//        iterPic = pcListPic->begin(); // to the beginning, non-efficient way, have to be revised!
				//        continue;
				//#endif
			}
			pcPic.SetOutputMark(false)
		}
		//#if !DYN_REF_FREE
		if pcPic != nil {
			pcPic.Destroy()
			//delete pcPic;
			pcPic = nil
		}
		//#endif
	}

	pcListPic.Init()
	this.m_iPOCLastDisplay = -TLibCommon.MAX_INT
}
Example #2
func LoadTemplates(name string, loader TemplateLoader) (*template.Template, error) {
	load_tracker := map[string]bool{name: true}
	var load_queue list.List
	load_queue.Init()
	load_queue.PushBack(name)

	t := template.New(name).Funcs(builtins)
	for e := load_queue.Front(); e != nil; e = e.Next() {
		template_name := e.Value.(string)
		new_template, err := loader.LoadTemplate(template_name)
		if err != nil {
			return nil, err
		}

		if _, err := t.Parse(new_template.Data); err != nil {
			return nil, err
		}

		if t.Lookup(template_name) == nil {
			return nil, fmt.Errorf("template %q failed to load", template_name)
		}

		for _, new_name := range new_template.Dependencies {
			if !load_tracker[new_name] {
				load_tracker[new_name] = true
				load_queue.PushBack(new_name)
			}
		}
	}

	return t, nil
}
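A minimal in-memory loader can be used to exercise LoadTemplates. The sketch below is illustrative only: the TemplateLoader interface is not shown above, so its return type is inferred from the fields the code accesses (Data and Dependencies), and the memLoader/memTemplate names are hypothetical.

// Hypothetical in-memory TemplateLoader; the interface shape is assumed
// from the calls above (LoadTemplate returning Data and Dependencies).
type memTemplate struct {
	Data         string
	Dependencies []string
}

type memLoader map[string]*memTemplate

func (m memLoader) LoadTemplate(name string) (*memTemplate, error) {
	if t, ok := m[name]; ok {
		return t, nil
	}
	return nil, fmt.Errorf("template %q not found", name)
}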
Example #3
func outOfOrder(l *list.List) {
	iTotal := 25
	if iTotal > l.Len() {
		iTotal = l.Len()
	}
	ll := make([]*list.List, iTotal)

	for i := 0; i < iTotal; i++ {
		ll[i] = list.New()
	}
	r := rand.New(rand.NewSource(time.Now().UnixNano()))
	for e := l.Front(); e != nil; e = e.Next() {
		fpath, ok := e.Value.(string)
		if !ok {
			panic("list element is not a string path")
		}
		if r.Intn(2) == 0 {
			ll[r.Intn(iTotal)].PushFront(fpath)
		} else {
			ll[r.Intn(iTotal)].PushBack(fpath)
		}
		}
	}

	r0 := rand.New(rand.NewSource(time.Now().UnixNano()))
	l.Init()
	for i := 0; i < iTotal; i++ {
		if r0.Intn(2) == 0 {
			l.PushBackList(ll[i])
		} else {
			l.PushFrontList(ll[i])
		}
		ll[i].Init()
	}
}
Example #4
func (this *InputNALUnit) convertPayloadToRBSP(nalUnitBuf *list.List, pcBitstream *TLibCommon.TComInputBitstream, isVclNalUnit bool) *list.List {
	zeroCount := 0
	it_write := list.New()
	oldBuf := list.New()
	for e := nalUnitBuf.Front(); e != nil; e = e.Next() {
		//assert(zeroCount < 2 || *it_read >= 0x03);
		it_read := e.Value.(byte)
		oldBuf.PushBack(it_read)
		if zeroCount == 2 && it_read == 0x03 {
			zeroCount = 0

			e = e.Next()
			if e == nil {
				break
			} else {
				it_read = e.Value.(byte)
				oldBuf.PushBack(it_read)
			}
		}

		if it_read == 0x00 {
			zeroCount++
		} else {
			zeroCount = 0
		}
		it_write.PushBack(it_read)
	}

	//assert(zeroCount == 0);
	if isVclNalUnit {
		// Remove cabac_zero_word from payload if present
		n := 0

		// walk backwards over trailing zero bytes; capture Prev before Remove,
		// since list.Remove detaches the element and e.Prev() would return nil
		for e := it_write.Back(); e != nil && e.Value.(byte) == 0x00; {
			prev := e.Prev()
			it_write.Remove(e)
			e = prev
			n++
		}

		if n > 0 {
			fmt.Printf("\nDetected %d instances of cabac_zero_word", n/2)
		}
	}

	nalUnitBuf.Init() // = .resize(it_write - nalUnitBuf.begin());
	for e := it_write.Front(); e != nil; e = e.Next() {
		it_read := e.Value.(byte)
		nalUnitBuf.PushBack(it_read)
	}

	return oldBuf
}
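For reference, the same start-code-emulation removal can be written against a plain byte slice. This is an illustrative sketch only, independent of the TLibCommon types used above; it drops each 0x03 that follows two zero bytes, exactly as the list-based loop does.

// unescapeRBSP strips emulation-prevention bytes: every 0x00 0x00 0x03
// sequence in the NAL payload becomes 0x00 0x00 in the RBSP.
func unescapeRBSP(payload []byte) []byte {
	rbsp := make([]byte, 0, len(payload))
	zeros := 0
	for _, b := range payload {
		if zeros == 2 && b == 0x03 {
			zeros = 0
			continue // drop the emulation-prevention byte
		}
		if b == 0x00 {
			zeros++
		} else {
			zeros = 0
		}
		rbsp = append(rbsp, b)
	}
	return rbsp
}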
Example #5
File: lst.go Project: tgrijalva/lst
func empty(l *list.List, args []string) {
	// print out all elements before dropping them
	itemNumber := 1
	for e := l.Front(); e != nil; e = e.Next() {
		fmt.Printf("Removing item[%d]:", itemNumber)
		printJSON(e.Value)
		itemNumber++
	}

	// empty list
	l.Init()
}
Example #6
// Serve the clients their events and destroy them when it's time.
// This also triggers the done chan when c is closed.
func serveChannels(c <-chan interface{}, clients *list.List, done chan<- struct{}) {
	for {
		var event interface{}
		channelOpen := true
		select {
		case event, channelOpen = <-c:
		case <-time.After(120 * time.Second):
		}

		if !channelOpen {
			close(done)

			// wait so no new clients come any longer
			<-time.After(1 * time.Second)
			for client := range clientsIter(clients) {
				close(client.value.channel)
			}

			// empty all references
			clients.Init()
			return
		}

		// channel is still open
		var wg sync.WaitGroup
		for client := range clientsIter(clients) {
			select {
			case <-client.value.quit:
				// client has quit
				client.removeFrom(clients)
				continue
			default:
			}

			// client has too many strikes
			if client.value.strikes >= 5 {
				client.removeFrom(clients)
				continue
			}

			// client has not quit
			wg.Add(1)
			go sendEvent(event, client.value, &wg)
		}
		// wait for the events to be sent
		wg.Wait()
	}
}
Example #7
func TestList(t *testing.T) {
	myInstance := new(MyType)
	myInstance.name = "hello"

	var myList list.List
	myList.Init()
	myList.PushFront(myInstance)

	x := myList.Front().Value.(*MyType)

	fmt.Println(x.name)
}
Example #8
func ListSwap(ls1 *list.List, ls2 *list.List) {
	tmpLs := list.New()
	// push the element values, not the *list.Element nodes, when copying
	for l1 := ls1.Front(); l1 != nil; l1 = l1.Next() {
		tmpLs.PushBack(l1.Value)
	}
	ls1.Init()
	for l2 := ls2.Front(); l2 != nil; l2 = l2.Next() {
		ls1.PushBack(l2.Value)
	}
	ls2.Init()
	for tl := tmpLs.Front(); tl != nil; tl = tl.Next() {
		ls2.PushBack(tl.Value)
	}
}
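A small usage sketch of ListSwap, illustrative only; note that it is the element values, not the *list.Element nodes, that end up in the other list.

// Hypothetical usage: after the call, a holds "x" and b holds 1 then 2.
func ExampleListSwap() {
	a, b := list.New(), list.New()
	a.PushBack(1)
	a.PushBack(2)
	b.PushBack("x")
	ListSwap(a, b)
	for e := b.Front(); e != nil; e = e.Next() {
		fmt.Println(e.Value)
	}
	// Output:
	// 1
	// 2
}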
Example #9
func (st *offlineSubTask) flush(id uint64, l *list.List) {
	path := filepath.Join(st.baseDir, fmt.Sprintf("%d", st.id), fmt.Sprintf("%d", id))
	var file *os.File
	var err error
	if file, err = open(path, os.O_WRONLY|os.O_APPEND|os.O_CREATE); err != nil {
		panic(err)
	}
	writer := bufio.NewWriter(file)
	defer file.Close()
	for e := l.Front(); e != nil; e = e.Next() {
		msg := e.Value.(RouteMsg)
		_writeMsg(writer, msg)
		st.cacheBytes -= uint64(len(msg.Body()))
	}
	writer.Flush()
	l.Init()
}
Example #10
// Parse splits code into `Section`s
func parse(source string, code []byte) *list.List {
	lines := bytes.Split(code, []byte("\n"))
	sections := new(list.List)
	sections.Init()
	language := getLanguage(source)

	var hasCode bool
	var codeText = new(bytes.Buffer)
	var docsText = new(bytes.Buffer)

	// save a new section
	save := func(docs, code []byte) {
		// deep copy the slices since slices always refer to the same storage
		// by default
		docsCopy, codeCopy := make([]byte, len(docs)), make([]byte, len(code))
		copy(docsCopy, docs)
		copy(codeCopy, code)
		sections.PushBack(&Section{docsCopy, codeCopy, nil, nil})
	}

	for _, line := range lines {
		// if the line is a comment
		if language.commentMatcher.Match(line) {
			// but there was previous code
			if hasCode {
				// we need to save the existing documentation and text
				// as a section and start a new section since code blocks
				// have to be delimited before being sent to Pygments
				save(docsText.Bytes(), codeText.Bytes())
				hasCode = false
				codeText.Reset()
				docsText.Reset()
			}
			docsText.Write(language.commentMatcher.ReplaceAll(line, nil))
			docsText.WriteString("\n")
		} else {
			hasCode = true
			codeText.Write(line)
			codeText.WriteString("\n")
		}
	}
	// save any remaining parts of the source file
	save(docsText.Bytes(), codeText.Bytes())
	return sections
}
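The parse function above relies on a getLanguage helper and a per-language commentMatcher that are not shown. A hedged sketch of what such a descriptor could look like; the struct, the regexps, and the extension check are assumptions, not the project's actual definitions.

// Hypothetical language descriptor; parse only needs a comment matcher.
type languageInfo struct {
	commentMatcher *regexp.Regexp
}

func getLanguage(source string) *languageInfo {
	// Assumed mapping by file extension; only two cases shown.
	if strings.HasSuffix(source, ".go") {
		return &languageInfo{commentMatcher: regexp.MustCompile(`^\s*//\s?`)}
	}
	return &languageInfo{commentMatcher: regexp.MustCompile(`^\s*#\s?`)}
}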
Example #11
func (bb *ReportQueue) PE_ParetoTestError() {
	bb.less = lessSizeTestError
	sort.Sort(bb)

	var pareto list.List
	pareto.Init()
	for i := range bb.queue {
		if bb.queue[i] == nil {
			continue
		}
		pareto.PushBack(bb.queue[i])
	}

	over := len(bb.queue) - 1
	for pareto.Len() > 0 && over >= 0 {
		pe := pareto.Front()
		eLast := pe
		pb := pe.Value.(*ExprReport)
		cSize := pb.Size()
		cScore := pb.testError
		pe = pe.Next()
		for pe != nil && over >= 0 {
			pb := pe.Value.(*ExprReport)
			if pb.testError < cScore {
				cScore = pb.testError
				if pb.Size() > cSize {
					bb.queue[over] = eLast.Value.(*ExprReport)
					over--
					pareto.Remove(eLast)
					cSize = pb.Size()
					eLast = pe
				}
			}
			pe = pe.Next()
		}
		if over < 0 {
			break
		}

		bb.queue[over] = eLast.Value.(*ExprReport)
		over--
		pareto.Remove(eLast)
	}
}
Example #12
/* delete all the elements with time <= t */
func DeleteBefore(t Time, L *list.List) {
	if L.Len() == 0 {
		return
	}
	back := L.Back()
	if back.Value.(Elem).GetTime() <= t {
		L = L.Init()
		return
	}
Loop:
	for {
		el := L.Front()
		if el.Value.(Elem).GetTime() <= t {
			L.Remove(el)
		} else {
			break Loop
		}
	}
}
Example #13
/* delete all the elements with time >= t */
func DeleteAfter(t Time, L *list.List) {
	if L.Len() == 0 {
		return
	}
	front := L.Front()
	if front.Value.(Elem).GetTime() >= t {
		L = L.Init()
		return
	}
Loop:
	for {
		el := L.Back()
		if el.Value.(Elem).GetTime() >= t {
			L.Remove(el)
		} else {
			break Loop
		}
	}
}
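A usage sketch for DeleteBefore/DeleteAfter under the assumptions the code above implies: Time is an ordered numeric type, Elem is an interface whose only required method is GetTime(), and the list is kept sorted by ascending time (DeleteBefore treats Back as the newest element, DeleteAfter treats Front as the oldest). The event type is hypothetical.

// Hypothetical element type satisfying the assumed Elem interface.
type event struct{ at Time }

func (e event) GetTime() Time { return e.at }

func exampleDelete() {
	L := list.New()
	for _, t := range []Time{1, 2, 3, 4, 5} { // ascending by time
		L.PushBack(event{at: t})
	}
	DeleteBefore(2, L) // removes times 1 and 2
	DeleteAfter(4, L)  // removes times 4 and 5; only time 3 remains
}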
Example #14
func tileWorker(T *quadratic.Map, alternativeStack chan *list.List, sink chan<- *quadratic.Map, workerCount chan int, halt chan int, tileMaps []*quadratic.Map, chooseNextEdge func(*quadratic.Map) *quadratic.Edge, maxtiles int, tileSymmetry string, showIntermediate bool) {
	localAlternatives := new(list.List)
Work:
	for {
		select {
		case <-halt:
			halt <- 1
			fmt.Fprintf(os.Stderr, "premature halt\n")
			return
		case L := <-alternativeStack:
			L.PushFrontList(localAlternatives)
			localAlternatives.Init()
			alternativeStack <- L
		default:
			if T.Faces.Len() > maxtiles && maxtiles > 0 {
				sink <- T
				break Work
			} else if noActiveFaces(T) {
				// os.Time was removed in Go 1; assuming initializationTime holds Unix seconds
				finishTime := time.Now().Unix()
				fmt.Fprintf(os.Stderr, "new tiling complete, took %v seconds\n", finishTime-initializationTime)
				sink <- T
				break Work
			} else if showIntermediate {
				sink <- T
			}
			alternatives := addTilesByEdge(T, tileMaps, chooseNextEdge, tileSymmetry)
			if alternatives.Len() == 0 {
				break Work
			}
			T = alternatives.Remove(alternatives.Front()).(*quadratic.Map)
			localAlternatives.PushFrontList(alternatives)
			//fmt.Fprintf(os.Stderr,"currently have %v faces\n",T.Faces.Len())
		}
	}
	L := <-alternativeStack
	L.PushFrontList(localAlternatives)
	localAlternatives.Init()
	alternativeStack <- L

	workers := <-workerCount
	workerCount <- workers - 1
}
Example #15
File: wc.go Project: kennyhlam/6824
// our simplified version of MapReduce does not supply a
// key to the Map function, as in the paper; only a value,
// which is a part of the input file contents
func Map(value string) *list.List {
	m := make(map[string]int)
	words := strings.FieldsFunc(value, func(r rune) bool { return !unicode.IsLetter(r) })
	for _, word := range words {
		m[word]++
	}
	l := new(list.List)
	l.Init()
	for k, val := range m {
		kv := mapreduce.KeyValue{Key: k, Value: strconv.Itoa(val)}
		l.PushBack(kv)
	}
	//  fmt.Println("DONE WITH MAPPING PHASE\n\n")
	return l
}
Example #16
// Dump the DHT
// First line of response is number of items
// It is important that the DHT is delivered in one go as
// otherwise there will be sync issues. Imagine you dump a bit now, then delete
// a message what was part of that dump and then dump some more, the delete would
// be lost.
// Will this work for 1,000,000 messages in the DHT?
func (myDHTServer *DHTServerStruct) dumpDHT(con *net.TCPConn) {

	// Query DHT
	sql := fmt.Sprintf("SELECT id,sha1,mailbox,size,orignodeid FROM DHT order by id")
	G_dhtDBLock.Lock()
	stmt, serr := myDHTServer.dht.Prepare(sql)
	defer stmt.Finalize()
	defer G_dhtDBLock.Unlock()
	if serr == nil {
		var id int
		var sha1 string
		var mailbox string
		var size int
		var orignodeid int

		var r list.List
		r.Init()
		stmt.Exec()
		rowcount := 0
		for stmt.Next() {
			err := stmt.Scan(&id, &sha1, &mailbox, &size, &orignodeid)
			if err != nil {
				myDHTServer.logger.Logf(LMIN, "Unexpected error using DB: %s", err)
				break
			}
			reply := fmt.Sprintf("%d,%s,%s,%d,%d\r\n", id, sha1, mailbox, size, orignodeid)
			r.PushBack(reply)
			rowcount++
		}

		con.Write([]byte(fmt.Sprintf("%d\r\n", rowcount)))

		for c := r.Front(); c != nil; c = c.Next() {
			con.Write([]byte(c.Value.(string)))
		}
	} else {
		myDHTServer.logger.Logf(LMIN, "Unexpected error using DB (%s): %s", sql, serr)
	}
}
Example #17
// Send the New Message Log to a remote node
// First line of response is number of items
func (myDHTServer *DHTServerStruct) sendNewMessageLog(con *net.TCPConn, hid string) {

	// Query newMessageLog
	sql := fmt.Sprintf("SELECT id, sha1, mailbox, size FROM newMessageLog where id > %s order by id", hid)
	G_nmlDBLock.Lock()
	stmt, serr := myDHTServer.nml.Prepare(sql)
	defer stmt.Finalize()
	defer G_nmlDBLock.Unlock()
	if serr == nil {
		var id int
		var sha1 string
		var mailbox string
		var size int

		var r list.List
		r.Init()
		stmt.Exec()
		rowcount := 0
		for stmt.Next() {
			err := stmt.Scan(&id, &sha1, &mailbox, &size)
			if err != nil {
				myDHTServer.logger.Logf(LMIN, "Unexpected error using DB: %s", err)
				break
			}
			reply := fmt.Sprintf("%d,%s,%s,%d,%s\r\n", id, sha1, mailbox, size, G_nodeID)
			r.PushBack(reply)
			rowcount++
		}

		con.Write([]byte(fmt.Sprintf("%d\r\n", rowcount)))

		for c := r.Front(); c != nil; c = c.Next() {
			con.Write([]byte(c.Value.(string)))
		}
	} else {
		myDHTServer.logger.Logf(LMIN, "Unexpected error using DB (%s): %s", sql, serr)
	}
}
Example #18
func (this *Dictionary) SortList(ls *list.List, pref map[string]string) {
	tmpLs := make([]string, ls.Len())
	count := 0
	for i := ls.Front(); i != nil; i = i.Next() {
		tmpLs[count] = i.Value.(string)
		count++
	}

	for i := 0; i < len(tmpLs); i++ {
		for j := i + 1; j < len(tmpLs); j++ {
			if this.less(tmpLs[j], tmpLs[i], pref) {
				tmpLs[i], tmpLs[j] = tmpLs[j], tmpLs[i]
			}
		}
	}

	ls = ls.Init()
	for i := 0; i < len(tmpLs); i++ {
		ls.PushBack(tmpLs[i])
	}
}
Example #19
func (this *SemanticDB) getWNKeys(form string, lemma string, tag string, searchList *list.List) {
	searchList = searchList.Init()
	for p := this.posMap.Front(); p != nil; p = p.Next() {
		LOG.Trace("Check tag " + tag + " with posmap " + p.Value.(PosMapRule).pos + " " + p.Value.(PosMapRule).wnpos + " " + p.Value.(PosMapRule).lemma)
		if strings.Index(tag, p.Value.(PosMapRule).pos) == 0 {
			LOG.Trace("   matched")
			var lm string
			if p.Value.(PosMapRule).lemma == "L" {
				lm = lemma
			} else if p.Value.(PosMapRule).lemma == "F" {
				lm = form
			} else {
				LOG.Trace("FOund word matching special map: " + lemma + " " + p.Value.(PosMapRule).lemma)
				lm = this.formDict.accessDatabase(lemma + " " + p.Value.(PosMapRule).lemma)
			}

			fms := StrArray2StrList(Split(lm, " "))
			for ifm := fms.Front(); ifm != nil; ifm = ifm.Next() {
				LOG.Trace("Adding word '" + form + "' to be searched with pos=" + p.Value.(PosMapRule).pos + " and lemma=" + ifm.Value.(string))
				searchList.PushBack(Pair{ifm.Value.(string), p.Value.(PosMapRule).wnpos})
			}
		}
	}
}
Example #20
func (this *Tokenizer) Tokenize(p string, offset int, v *list.List) {
	var t [10]string
	var i *list.Element
	var match bool
	substr := 0
	ln := 0

	v = v.Init()

	cont := 0
	for cont < len(p) {
		// guard against running past the end of the input while skipping blanks
		for cont < len(p) && WhiteSpace(p[cont]) {
			cont++
			offset++
		}
		if cont >= len(p) {
			break
		}
		LOG.Trace("Tokenizing [" + p[cont:] + "]")
		match = false

		for i = this.rules.Front(); i != nil && !match; i = i.Next() {
			LOG.Trace("Checking rule " + i.Value.(Pair).first.(string))
			ps := strings.Index(p[cont:], " ")
			delta := cont + ps
			if ps == -1 {
				delta = len(p)
			}
			results := RegExHasSuffix(i.Value.(Pair).second.(*regexp.Regexp), p[cont:delta])
			if len(results) > 0 {
				match = true
				ln = 0
				substr = this.matches[i.Value.(Pair).first.(string)]
				for j := If(substr == 0, 0, 1).(int); j <= substr && match; j++ {
					t[j] = results[j]
					ln += len(t[j])
					LOG.Trace("Found match " + strconv.Itoa(j) + " [" + t[j] + "] for rule " + i.Value.(Pair).first.(string))
					if string(i.Value.(Pair).first.(string)[0]) == "*" {
						lower := strings.ToLower(t[j])
						if !this.abrevs.Has(lower) {
							match = false
							LOG.Trace("Special rule and found match not in abbrev list. Rule not satisfied")
						}
					}
				}
			}

		}

		if match {
			if i == nil {
				i = this.rules.Back()
			} else {
				i = i.Prev()
			}
			substr = this.matches[i.Value.(Pair).first.(string)]
			for j := If(substr == 0, 0, 1).(int); j <= substr && match; j++ {
				if len(t[j]) > 0 {
					LOG.Trace("Accepting matched substring [" + t[j] + "]")
					w := NewWordFromLemma(t[j])
					w.setSpan(offset, offset+len(t[j]))
					offset += len(t[j])
					v.PushBack(w)
				} else {
					LOG.Trace("Skipping matched null substring [" + t[j] + "]")
				}
			}
			cont += ln
		} else if cont < len(p) {
			LOG.Warn("No rule matched input substring" + p[cont:] + " . Character " + string(p[cont:][0]) + " skipped . Check your tokenization rules")
			cont++
		}
	}

	offset++
}
Example #21
func main() {

	var sigchan = make(chan os.Signal, 1)
	signal.Notify(sigchan, os.Interrupt)
	go func() {
		<-sigchan
		fmt.Fprintf(os.Stdout, "\n")
		os.Exit(0)
	}()

	pacemakerPresentPattern, _ := regexp.Compile("PACEMAKER_PRESENT")
	pacemakerIterationPattern, _ := regexp.Compile("PACEMAKER_ITERATION")
	numberPattern, _ := regexp.Compile("[0-9.]+$")

	scanner := bufio.NewScanner(os.Stdin)
	var buffer list.List
	var legend string
	var scaleHasChanged = false
	w, _, _ := terminal.GetSize(1)
	terminalWidth := uint(w - 10)

	lastSampleTaken := time.Now()
	pacemakerPresent := false

	for scanner.Scan() {
		lineOfText := scanner.Text()
		if pacemakerPresentPattern.MatchString(lineOfText) {
			pacemakerPresent = true
			if msBetweenSamples != defaultMsBetweenSamples {
				log.Fatal("You specified a value for sample-period-ms but a pacemaker signal was found. If you are processing logs retrospectively stick with the pacemaker. If you are sampling in real time ditch the pacemaker.")
			}
		}
		pacemakerIterationSignal := pacemakerIterationPattern.MatchString(lineOfText)
		if !pacemakerIterationSignal && numberPattern.MatchString(lineOfText) {
			var dataPoint float64
			numberText := numberPattern.FindString(lineOfText)
			dataPoint, _ = strconv.ParseFloat(numberText, 64)
			if dataPoint > maximumValue {
				maximumValue = 1.2 * dataPoint
				scaleHasChanged = true
			}
			buffer.PushBack(dataPoint)
		}
		if pacemakerIterationSignal || (!pacemakerPresent && time.Since(lastSampleTaken) >= time.Millisecond*time.Duration(msBetweenSamples)) {
			timeText := time.Now().Format("15:04:05")
			if pacemakerIterationSignal {
				timeText = strings.Split(lineOfText, " ")[1]
			}
			histogram, newMaximumMagnitude, maximumMagnitudeHasChanged := sample(buffer, maximumValue, terminalWidth, maximumMagnitude)
			if maximumMagnitudeHasChanged {
				maximumMagnitude = newMaximumMagnitude
			} // otherwise scope means it is forgotten each time
			legend = updateLegendAndNotifyIfScaleHasChanged(legend, maximumValue, scaleHasChanged, terminalWidth)
			printSample(histogram, timeText, maximumValue, terminalWidth, maximumMagnitude, maximumMagnitudeHasChanged)
			printScale(histogram, int(len(timeText)), legend)
			// reset for next sample
			scaleHasChanged = false
			buffer.Init()
			lastSampleTaken = time.Now()
		}
	}
	if err := scanner.Err(); err != nil {
		fmt.Fprintln(os.Stderr, "reading standard input:", err)
	}
	fmt.Fprint(os.Stdout, "\n")
}
Example #22
func (this *Splitter) Split(st *SplitterStatus, v *list.List, flush bool, ls *list.List) {
	ls = ls.Init()
	LOG.Trace("Looking for a sentence marker. Max no split is " + strconv.Itoa(int(this.SPLIT_MaxWords)))
	for w := v.Front(); w != nil; w = w.Next() {
		m := this.markers[w.Value.(*Word).getForm()]
		checkSplit := true

		if st.BetweenMark && !this.SPLIT_AllowBetweenMarkers && m != 0 && m == If(m > SAME, 1, -1).(int)*st.MarkType.Front().Value.(int) {
			LOG.Trace("End no split period. marker " + w.Value.(*Word).getForm() + " code: " + strconv.Itoa(m))
			st.MarkType.Remove(st.MarkType.Front())
			st.MarkForm.Remove(st.MarkForm.Front())
			if st.MarkForm.Len() == 0 {
				st.BetweenMark = false
				st.NoSplitCount = 0
			} else {
				st.NoSplitCount++
			}

			st.buffer.PushBack(w.Value.(*Word))
			checkSplit = false
		} else if m > 0 && !this.SPLIT_AllowBetweenMarkers {
			st.MarkForm.PushFront(w.Value.(*Word).getForm())
			st.MarkType.PushFront(m)
			LOG.Trace("Start no split periood, marker " + w.Value.(*Word).getForm() + " code:" + strconv.Itoa(m))
			st.BetweenMark = true
			st.NoSplitCount++
			st.buffer.PushBack(w.Value.(*Word))
			checkSplit = false
		} else if st.BetweenMark {
			LOG.Trace("no-split flag continues set. word " + w.Value.(*Word).getForm() + " expecting code " + strconv.Itoa(st.MarkType.Front().Value.(int)) + " (closing " + st.MarkForm.Front().Value.(string))
			st.NoSplitCount++
			if this.SPLIT_MaxWords == 0 || st.NoSplitCount <= int(this.SPLIT_MaxWords) {
				checkSplit = false
				st.buffer.PushBack(w.Value.(*Word))
			}

			if st.NoSplitCount == VERY_LONG {
				LOG.Warn("Sentence is very long")
			}
		}

		if checkSplit {
			e, ok := this.enders[w.Value.(*Word).getForm()]
			if ok {
				if e || this.endOfSentence(w, v) {
					LOG.Trace("Sentence marker [" + w.Value.(*Word).getForm() + "] found")
					st.buffer.PushBack(w.Value.(*Word))
					st.nsentence++
					st.buffer.sentID = strconv.Itoa(st.nsentence)
					ls.PushBack(st.buffer)
					LOG.Trace("Sentence lenght " + strconv.Itoa(st.buffer.Len()))
					nsentence := st.nsentence
					this.CloseSession(st)
					st = this.OpenSession()
					st.nsentence = nsentence
				} else {
					LOG.Trace(w.Value.(*Word).getForm() + " is not a sentence marker here")
					st.buffer.PushBack(w.Value.(*Word))
				}
			} else {
				LOG.Trace(w.Value.(*Word).getForm() + " is not a sentence marker here")
				st.buffer.PushBack(w.Value.(*Word))
			}
		}
	}

	if flush && st.buffer.Len() > 0 {
		LOG.Trace("Flushing the remaining words into a sentence")
		st.nsentence++
		st.buffer.sentID = strconv.Itoa(st.nsentence)
		ls.PushBack(st.buffer)
		nsentence := st.nsentence
		this.CloseSession(st)
		st = this.OpenSession()
		st.nsentence = nsentence
	}
}
Example #23
func (this *Dictionary) AnnotateWord(w *Word, lw *list.List, override bool) bool {
	LOG.Trace("Searching in dictionary for word " + w.getForm())
	la := list.New()
	this.SearchForm(w.getForm(), la)
	w.setFoundInDict(la.Len() > 0)
	LOG.Trace("   Found " + strconv.Itoa(la.Len()) + " analysis.")
	for a := la.Front(); a != nil; a = a.Next() {
		w.addAnalysis(a.Value.(*Analysis))
		LOG.Trace("   added analysis " + a.Value.(*Analysis).getLemma())
	}

	if this.CompoundAnalysis {
		//TODO
	}

	contr := false

	if !this.RetokenizeContractions || override {
		newLa := list.New()

		for a := w.Front(); a != nil; a = a.Next() {
			tgs := list.New()
			tmpItems := Split(a.Value.(*Analysis).getTag(), "+")
			for _, tmpItem := range tmpItems {
				tgs.PushBack(tmpItem)
			}
			tc := this.tagCombination(tgs.Front(), tgs.Back().Prev())

			if tc.Len() > 1 {
				newLa = newLa.Init()
				for tag := tc.Front(); tag != nil; tag = tag.Next() {
					// allocate a fresh Analysis per tag; pushing the same
					// pointer would make every entry alias the last analysis
					na := &Analysis{}
					na.init(a.Value.(*Analysis).getLemma(), tag.Value.(string))
					newLa.PushBack(na)
				}

				ta := a
				for t := newLa.Front(); t != nil; t = t.Next() {
					// assuming Word embeds list.List: insert the Analysis value
					// after ta, not the element itself
					ta = w.InsertAfter(t.Value, ta)
				}
				w.Remove(a)
				a = ta
			}
		}

		for a := w.Front(); a != nil; a = a.Next() {
			lw = lw.Init()
			if this.CheckContracted(w.getForm(), a.Value.(*Analysis).getLemma(), a.Value.(*Analysis).getTag(), lw) {
				a.Value.(*Analysis).setRetokenizable(lw)
			}
		}
	} else {
		ca := w.Front()
		for ca != nil && (strings.Index(ca.Value.(*Analysis).getLemma(), "+") == -1 || strings.Index(ca.Value.(*Analysis).getTag(), "+") == -1) {
			ca = ca.Next()
		}

		if ca != nil && w.getNAnalysis() > 1 {
			LOG.Warn("Contraction " + w.getForm() + " has several analysis in dictionary. All ignored except (" + ca.Value.(*Analysis).getLemma() + "," + ca.Value.(*Analysis).getTag() + "). Set RetokenizeContraction=false to keep all analysis.")
		} else {
			ca = w.Front()
		}
		if ca != nil && this.CheckContracted(w.getForm(), ca.Value.(*Analysis).getLemma(), ca.Value.(*Analysis).getTag(), lw) {
			contr = true
		}
	}

	return contr
}