Java Code Examples for org.apache.lucene.util.AttributeSource#addAttribute()
The following examples show how to use org.apache.lucene.util.AttributeSource#addAttribute().
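Before the examples, here is a minimal sketch of the most common addAttribute() pattern: a TokenFilter registering a shared attribute on its AttributeSource. The UppercaseFilter class is a hypothetical illustration (not taken from any project below); it only shows that addAttribute() returns the single instance shared by every component in the analysis chain.

import java.io.IOException;

import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

// Hypothetical example: TokenStream extends AttributeSource, so addAttribute()
// either registers a new attribute implementation or returns the instance
// already shared by the rest of the chain.
final class UppercaseFilter extends TokenFilter {

  // Repeated addAttribute() calls for the same interface on the same
  // AttributeSource return the same CharTermAttribute instance.
  private final CharTermAttribute termAtt = addAttribute(CharTermAttribute.class);

  UppercaseFilter(TokenStream input) {
    super(input);
  }

  @Override
  public boolean incrementToken() throws IOException {
    if (!input.incrementToken()) {
      return false;
    }
    // Mutate the shared term attribute in place.
    char[] buffer = termAtt.buffer();
    for (int i = 0; i < termAtt.length(); i++) {
      buffer[i] = Character.toUpperCase(buffer[i]);
    }
    return true;
  }
}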
Example 1
Source File: ConcatenatingTokenStream.java (lucene-solr, Apache License 2.0)
private static AttributeSource combineSources(TokenStream... sources) {
  AttributeSource base = sources[0].cloneAttributes();
  try {
    for (int i = 1; i < sources.length; i++) {
      Iterator<Class<? extends Attribute>> it = sources[i].getAttributeClassesIterator();
      while (it.hasNext()) {
        base.addAttribute(it.next());
      }
      // check attributes can be captured
      sources[i].copyTo(base);
    }
    return base;
  } catch (IllegalArgumentException e) {
    throw new IllegalArgumentException("Attempted to concatenate TokenStreams with different attribute types", e);
  }
}
Example 2
Source File: FuzzyTermsEnum.java (lucene-solr, Apache License 2.0)
private FuzzyTermsEnum(Terms terms, AttributeSource atts, Term term,
                       Supplier<FuzzyAutomatonBuilder> automatonBuilder) throws IOException {
  this.terms = terms;
  this.atts = atts;
  this.term = term;
  this.maxBoostAtt = atts.addAttribute(MaxNonCompetitiveBoostAttribute.class);
  this.boostAtt = atts.addAttribute(BoostAttribute.class);
  atts.addAttributeImpl(new AutomatonAttributeImpl());
  AutomatonAttribute aa = atts.addAttribute(AutomatonAttribute.class);
  aa.init(automatonBuilder);
  this.automata = aa.getAutomata();
  this.termLength = aa.getTermLength();
  this.maxEdits = this.automata.length - 1;
  bottom = maxBoostAtt.getMaxNonCompetitiveBoost();
  bottomTerm = maxBoostAtt.getCompetitiveTerm();
  bottomChanged(null);
}
Example 3
Source File: NumericTokenizer.java (Elasticsearch, Apache License 2.0)
/** Make this tokenizer get attributes from the delegate token stream. */
private static final AttributeFactory delegatingAttributeFactory(final AttributeSource source) {
  return new AttributeFactory() {
    @Override
    public AttributeImpl createAttributeInstance(Class<? extends Attribute> attClass) {
      return (AttributeImpl) source.addAttribute(attClass);
    }
  };
}
Example 4
Source File: FieldInvertState.java (lucene-solr, Apache License 2.0)
/** Sets attributeSource to a new instance. */
void setAttributeSource(AttributeSource attributeSource) {
  if (this.attributeSource != attributeSource) {
    this.attributeSource = attributeSource;
    termAttribute = attributeSource.getAttribute(TermToBytesRefAttribute.class);
    termFreqAttribute = attributeSource.addAttribute(TermFrequencyAttribute.class);
    posIncrAttribute = attributeSource.addAttribute(PositionIncrementAttribute.class);
    offsetAttribute = attributeSource.addAttribute(OffsetAttribute.class);
    payloadAttribute = attributeSource.getAttribute(PayloadAttribute.class);
  }
}
Example 5
Source File: ConditionalTokenFilter.java (lucene-solr, Apache License 2.0)
public OneTimeWrapper(AttributeSource attributeSource) {
  super(attributeSource);
  this.offsetAtt = attributeSource.addAttribute(OffsetAttribute.class);
  this.posIncAtt = attributeSource.addAttribute(PositionIncrementAttribute.class);
}
Example 6
Source File: GraphTokenFilter.java (lucene-solr, Apache License 2.0)
Token(AttributeSource attSource) {
  this.attSource = attSource;
  this.posIncAtt = attSource.addAttribute(PositionIncrementAttribute.class);
  boolean hasLengthAtt = attSource.hasAttribute(PositionLengthAttribute.class);
  this.lengthAtt = hasLengthAtt ? attSource.addAttribute(PositionLengthAttribute.class) : null;
}
Example 7
Source File: SimplePreAnalyzedParser.java (lucene-solr, Apache License 2.0)
private static AttributeSource.State createState(AttributeSource a, Tok state, int tokenEnd) {
  a.clearAttributes();
  CharTermAttribute termAtt = a.addAttribute(CharTermAttribute.class);
  char[] tokChars = state.token.toString().toCharArray();
  termAtt.copyBuffer(tokChars, 0, tokChars.length);
  int tokenStart = tokenEnd - state.token.length();
  for (Entry<String, String> e : state.attr.entrySet()) {
    String k = e.getKey();
    if (k.equals("i")) {
      // position increment
      int incr = Integer.parseInt(e.getValue());
      PositionIncrementAttribute posIncr = a.addAttribute(PositionIncrementAttribute.class);
      posIncr.setPositionIncrement(incr);
    } else if (k.equals("s")) {
      tokenStart = Integer.parseInt(e.getValue());
    } else if (k.equals("e")) {
      tokenEnd = Integer.parseInt(e.getValue());
    } else if (k.equals("y")) {
      TypeAttribute type = a.addAttribute(TypeAttribute.class);
      type.setType(e.getValue());
    } else if (k.equals("f")) {
      FlagsAttribute flags = a.addAttribute(FlagsAttribute.class);
      int f = Integer.parseInt(e.getValue(), 16);
      flags.setFlags(f);
    } else if (k.equals("p")) {
      PayloadAttribute p = a.addAttribute(PayloadAttribute.class);
      byte[] data = hexToBytes(e.getValue());
      if (data != null && data.length > 0) {
        p.setPayload(new BytesRef(data));
      }
    } else {
      // unknown attribute
    }
  }
  // handle offset attr
  OffsetAttribute offset = a.addAttribute(OffsetAttribute.class);
  offset.setOffset(tokenStart, tokenEnd);
  State resState = a.captureState();
  a.clearAttributes();
  return resState;
}
Example 8
Source File: MtasPreAnalyzedParser.java (mtas, Apache License 2.0)
@Override
public ParseResult parse(Reader reader, AttributeSource parent) throws IOException {
  ParseResult res = new ParseResult();
  // get MtasUpdateRequestProcessorResult
  StringBuilder sb = new StringBuilder();
  char[] buf = new char[128];
  int cnt;
  while ((cnt = reader.read(buf)) > 0) {
    sb.append(buf, 0, cnt);
  }
  Iterator<MtasUpdateRequestProcessorResultItem> iterator;
  try (MtasUpdateRequestProcessorResultReader result =
      new MtasUpdateRequestProcessorResultReader(sb.toString())) {
    iterator = result.getIterator();
    if (iterator != null && iterator.hasNext()) {
      res.str = result.getStoredStringValue();
      res.bin = result.getStoredBinValue();
    } else {
      res.str = null;
      res.bin = null;
      result.close();
      return res;
    }
    parent.clearAttributes();
    while (iterator.hasNext()) {
      MtasUpdateRequestProcessorResultItem item = iterator.next();
      if (item.tokenTerm != null) {
        CharTermAttribute catt = parent.addAttribute(CharTermAttribute.class);
        catt.append(item.tokenTerm);
      }
      if (item.tokenFlags != null) {
        FlagsAttribute flags = parent.addAttribute(FlagsAttribute.class);
        flags.setFlags(item.tokenFlags);
      }
      if (item.tokenPosIncr != null) {
        PositionIncrementAttribute patt = parent.addAttribute(PositionIncrementAttribute.class);
        patt.setPositionIncrement(item.tokenPosIncr);
      }
      if (item.tokenPayload != null) {
        PayloadAttribute p = parent.addAttribute(PayloadAttribute.class);
        p.setPayload(new BytesRef(item.tokenPayload));
      }
      if (item.tokenOffsetStart != null && item.tokenOffsetEnd != null) {
        OffsetAttribute offset = parent.addAttribute(OffsetAttribute.class);
        offset.setOffset(item.tokenOffsetStart, item.tokenOffsetEnd);
      }
      // capture state and add to result
      State state = parent.captureState();
      res.states.add(state.clone());
      // reset for reuse
      parent.clearAttributes();
    }
  } catch (IOException e) {
    // ignore
    log.debug(e);
  }
  return res;
}