func (r *CompressingTermVectorsReader) Close() (err error) { if !r.closed { err = util.Close(r.vectorsStream) r.closed = true } return err }
// decRef drops one reference to the shared core readers. When the count
// hits zero, all core resources are closed and the owner is sent on
// notifyListener so interested parties learn the core has shut down.
func (r *SegmentCoreReaders) decRef() {
	if atomic.AddInt32(&r.refCount, -1) == 0 {
		// NOTE(review): the error returned by util.Close is dropped here —
		// presumably a deliberate best-effort close on final release; confirm.
		util.Close( /*self.termVectorsLocal, self.fieldsReaderLocal, docValuesLocal, normsLocal,*/
			r.fields, r.dvProducer, r.termVectorsReaderOrig, r.fieldsReaderOrig,
			r.cfsReader, r.normsProducer)
		// Notify after closing so the listener observes fully-released resources.
		r.notifyListener <- r.owner
	}
}
func (r *CompressingStoredFieldsReader) Close() (err error) { if !r.closed { if err = util.Close(r.fieldsStream); err == nil { r.closed = true } } return err }
func (r *BlockTreeTermsReader) Close() error { defer func() { // Clear so refs to terms index is GCable even if // app hangs onto us: r.fields = make(map[string]FieldReader) }() return util.Close(r.in, r.postingsReader) }
func (dvp *PerFieldDocValuesReader) Close() error { fps := make([]DocValuesProducer, 0) for _, v := range dvp.formats { fps = append(fps, v) } items := make([]io.Closer, len(fps)) for i, v := range fps { items[i] = v } return util.Close(items...) }
func (r *PerFieldPostingsReader) Close() error { fps := make([]FieldsProducer, 0) for _, v := range r.formats { fps = append(fps, v) } items := make([]io.Closer, len(fps)) for i, v := range fps { items[i] = v } return util.Close(items...) }
func newLucene42DocValuesProducer(state SegmentReadState, dataCodec, dataExtension, metaCodec, metaExtension string) (dvp *Lucene42DocValuesProducer, err error) { dvp = &Lucene42DocValuesProducer{} dvp.maxDoc = int(state.segmentInfo.docCount) metaName := util.SegmentFileName(state.segmentInfo.name, state.segmentSuffix, metaExtension) // read in the entries from the metadata file. in, err := state.dir.OpenInput(metaName, state.context) if err != nil { return dvp, err } success := false defer func() { if success { err = util.Close(in) } else { util.CloseWhileSuppressingError(in) } }() version, err := codec.CheckHeader(in, metaCodec, LUCENE42_DV_VERSION_START, LUCENE42_DV_VERSION_CURRENT) if err != nil { return dvp, err } dvp.numerics = make(map[int]NumericEntry) dvp.binaries = make(map[int]BinaryEntry) dvp.fsts = make(map[int]FSTEntry) err = dvp.readFields(in, state.fieldInfos) if err != nil { return dvp, err } success = true success = false dataName := util.SegmentFileName(state.segmentInfo.name, state.segmentSuffix, dataExtension) dvp.data, err = state.dir.OpenInput(dataName, state.context) if err != nil { return dvp, err } version2, err := codec.CheckHeader(dvp.data, dataCodec, LUCENE42_DV_VERSION_START, LUCENE42_DV_VERSION_CURRENT) if err != nil { return dvp, err } if version != version2 { return dvp, errors.New("Format versions mismatch") } return dvp, nil }
func (d *CompoundFileDirectory) Close() error { log.Printf("Closing %v...", d) if d == nil { // interface not nil return nil } d.lock.Lock() defer d.lock.Unlock() if d == nil || !d.isOpen { log.Print("CompoundFileDirectory is already closed.") // allow double close - usually to be consistent with other closeables return nil // already closed } d.isOpen = false /* if d.writer != nil { // assert d.openForWrite return writer.Close() } else {*/ return util.Close(d.handle) // } }
// CompressingStoredFieldsReader.java L90 func newCompressingStoredFieldsReader(d store.Directory, si SegmentInfo, segmentSuffix string, fn FieldInfos, ctx store.IOContext, formatName string, compressionMode codec.CompressionMode) (r *CompressingStoredFieldsReader, err error) { r = &CompressingStoredFieldsReader{} r.compressionMode = compressionMode segment := si.name r.fieldInfos = fn r.numDocs = int(si.docCount) var indexStream store.IndexInput success := false defer func() { if !success { log.Println("Failed to initialize CompressionStoredFieldsReader.") if err != nil { log.Print(err) } util.Close(r, indexStream) } }() // Load the index into memory indexStreamFN := util.SegmentFileName(segment, segmentSuffix, LUCENE40_SF_FIELDS_INDEX_EXTENSION) indexStream, err = d.OpenInput(indexStreamFN, ctx) if err != nil { return nil, err } codecNameIdx := formatName + CODEC_SFX_IDX codec.CheckHeader(indexStream, codecNameIdx, CODEC_SFX_VERSION_START, CODEC_SFX_VERSION_CURRENT) if int64(codec.HeaderLength(codecNameIdx)) != indexStream.FilePointer() { panic("assert fail") } r.indexReader, err = newCompressingStoredFieldsIndexReader(indexStream, si) if err != nil { return nil, err } err = indexStream.Close() if err != nil { return nil, err } indexStream = nil // Open the data file and read metadata fieldsStreamFN := util.SegmentFileName(segment, segmentSuffix, LUCENE40_SF_FIELDS_EXTENSION) r.fieldsStream, err = d.OpenInput(fieldsStreamFN, ctx) if err != nil { return nil, err } codecNameDat := formatName + CODEC_SFX_DAT codec.CheckHeader(r.fieldsStream, codecNameDat, CODEC_SFX_VERSION_START, CODEC_SFX_VERSION_CURRENT) if int64(codec.HeaderLength(codecNameDat)) != r.fieldsStream.FilePointer() { panic("assert fail") } n, err := r.fieldsStream.ReadVInt() if err != nil { return nil, err } r.packedIntsVersion = int(n) r.decompressor = compressionMode.NewDecompressor() r.bytes = make([]byte, 0) success = true return r, nil }
// Close shuts down the doc, position, and payload input streams,
// returning the combined/first close error.
func (r *Lucene41PostingsReader) Close() error {
	err := util.Close(r.docIn, r.posIn, r.payIn)
	return err
}