com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer Java Examples
The following examples show how to use
com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer.
You can go to the original project or source file by following the links above each example.
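For orientation, here is a minimal, self-contained sketch (not taken from the projects below) showing the class used directly as a StAX XMLStreamWriter to produce a Fast Infoset document. The output file name and element names are made up for illustration.

import com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer;
import java.io.FileOutputStream;
import java.io.OutputStream;

public class FastInfosetHello {
    public static void main(String[] args) throws Exception {
        // Hypothetical output file name; any OutputStream works.
        try (OutputStream out = new FileOutputStream("hello.finf")) {
            StAXDocumentSerializer writer = new StAXDocumentSerializer(out);
            // StAXDocumentSerializer implements javax.xml.stream.XMLStreamWriter,
            // so the standard StAX writing calls apply; the events are encoded
            // as Fast Infoset rather than text XML.
            writer.writeStartDocument();
            writer.writeStartElement("greeting");
            writer.writeCharacters("Hello, Fast Infoset");
            writer.writeEndElement();
            writer.writeEndDocument();
            writer.close();
        }
    }
}

Note that com.sun.xml.internal.fastinfoset is a JDK-internal package, so compiling against it may require extra compiler flags (or using the standalone FastInfoset library's equivalent classes instead).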
Example #1
Source File: XML_SAX_StAX_FI.java From openjdk-8 with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
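As a rough usage illustration (not part of the source above), the converter could be driven as follows. This assumes XML_SAX_StAX_FI has an accessible no-argument constructor; the file names are placeholders, and passing null for workingDirectory simply skips the relative-path entity resolver, as the method body shows.

import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.OutputStream;

public class ConvertToFastInfoset {
    public static void main(String[] args) throws Exception {
        // Placeholder file names; parse() closes both streams itself.
        try (InputStream xml = new FileInputStream("document.xml");
             OutputStream finf = new FileOutputStream("document.finf")) {
            new XML_SAX_StAX_FI().parse(xml, finf, null); // null: no custom entity resolution
        }
    }
}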
Example #2
Source File: FastInfosetCodec.java From openjdk-8-source with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
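The point of retainState is that one serializer, given an externally supplied vocabulary, can be reused for several documents so that repeated names and strings stay indexed across messages. Below is a rough, untested sketch of that pattern outside the codec. The element names are made up, the package for SerializerVocabulary is assumed to be com.sun.xml.internal.fastinfoset.vocab (mirroring the internal copy of the FastInfoset library), and re-arming the serializer with setOutputStream between documents is likewise an assumption rather than something shown above.

import com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer;
import com.sun.xml.internal.fastinfoset.vocab.SerializerVocabulary;
import java.io.ByteArrayOutputStream;

public class RetainedVocabularyDemo {
    public static void main(String[] args) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
        // External vocabulary: retained (and growing) across serializations,
        // mirroring the retainState branch above.
        serializer.setVocabulary(new SerializerVocabulary());

        for (int i = 0; i < 3; i++) {
            serializer.setOutputStream(out);            // assumed to re-arm the serializer per document
            serializer.writeStartDocument();
            serializer.writeStartElement("message");    // indexed on first use, reused afterwards
            serializer.writeCharacters("payload " + i);
            serializer.writeEndElement();
            serializer.writeEndDocument();
            serializer.flush();
        }
        serializer.close();
    }
}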
Example #3
Source File: XML_SAX_StAX_FI.java From openjdk-8-source with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
Example #4
Source File: FastInfosetCodec.java From hottub with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
Example #5
Source File: XML_SAX_StAX_FI.java From hottub with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
Example #6
Source File: XML_SAX_StAX_FI.java From openjdk-jdk9 with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
Example #7
Source File: FastInfosetCodec.java From openjdk-jdk9 with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
Example #8
Source File: XML_SAX_StAX_FI.java From TencentKona-8 with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
Example #9
Source File: FastInfosetCodec.java From openjdk-jdk8u-backup with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
Example #10
Source File: XML_SAX_StAX_FI.java From openjdk-jdk8u-backup with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
Example #11
Source File: FastInfosetCodec.java From openjdk-jdk8u with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
Example #12
Source File: XML_SAX_StAX_FI.java From openjdk-jdk8u with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
Example #13
Source File: FastInfosetCodec.java From jdk8u60 with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
Example #14
Source File: FastInfosetCodec.java From openjdk-8 with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
Example #15
Source File: XML_SAX_StAX_FI.java From jdk8u60 with GNU General Public License v2.0 | 6 votes |
public void parse(InputStream xml, OutputStream finf, String workingDirectory) throws Exception {
    StAXDocumentSerializer documentSerializer = new StAXDocumentSerializer();
    documentSerializer.setOutputStream(finf);

    SAX2StAXWriter saxTostax = new SAX2StAXWriter(documentSerializer);

    SAXParserFactory saxParserFactory = SAXParserFactory.newInstance();
    saxParserFactory.setNamespaceAware(true);
    SAXParser saxParser = saxParserFactory.newSAXParser();

    XMLReader reader = saxParser.getXMLReader();
    reader.setProperty("http://xml.org/sax/properties/lexical-handler", saxTostax);
    reader.setContentHandler(saxTostax);
    if (workingDirectory != null) {
        reader.setEntityResolver(createRelativePathResolver(workingDirectory));
    }
    reader.parse(new InputSource(xml));

    xml.close();
    finf.close();
}
Example #16
Source File: FastInfosetCodec.java From TencentKona-8 with GNU General Public License v2.0 | 6 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out,
        boolean retainState, int indexedStringSizeLimit, int stringsMemoryLimit) {
    StAXDocumentSerializer serializer = new StAXDocumentSerializer(out);
    if (retainState) {
        /*
         * Create a serializer vocabulary external to the serializer.
         * This will ensure that the vocabulary will never be cleared
         * for each serialization and will be retained (and will grow)
         * for each serialization.
         */
        SerializerVocabulary vocabulary = new SerializerVocabulary();
        serializer.setVocabulary(vocabulary);
        serializer.setMinAttributeValueSize(0);
        serializer.setMaxAttributeValueSize(indexedStringSizeLimit);
        serializer.setMinCharacterContentChunkSize(0);
        serializer.setMaxCharacterContentChunkSize(indexedStringSizeLimit);
        serializer.setAttributeValueMapMemoryLimit(stringsMemoryLimit);
        serializer.setCharacterContentChunkMapMemoryLimit(stringsMemoryLimit);
    }
    return serializer;
}
Example #17
Source File: FastInfosetStreamWriterOutput.java From openjdk-jdk8u with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out, NoEscapeHandler.theInstance);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
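This constructor is JAXB-internal plumbing, but the same end result (marshalling JAXB objects straight to Fast Infoset) can be sketched with the public StAX hook on Marshaller, since StAXDocumentSerializer is an XMLStreamWriter. The Invoice class below is a made-up example type, not anything from the sources above.

import com.sun.xml.internal.fastinfoset.stax.StAXDocumentSerializer;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.Marshaller;
import javax.xml.bind.annotation.XmlRootElement;
import java.io.FileOutputStream;
import java.io.OutputStream;

public class MarshalToFastInfoset {

    // Hypothetical JAXB-annotated type used purely for illustration.
    @XmlRootElement
    public static class Invoice {
        public String customer = "ACME";
    }

    public static void main(String[] args) throws Exception {
        JAXBContext context = JAXBContext.newInstance(Invoice.class);
        Marshaller marshaller = context.createMarshaller();

        try (OutputStream out = new FileOutputStream("invoice.finf")) {
            StAXDocumentSerializer fiWriter = new StAXDocumentSerializer(out);
            // Marshaller.marshal(Object, XMLStreamWriter) accepts any XMLStreamWriter,
            // so the marshalled events are encoded as Fast Infoset instead of text XML.
            marshaller.marshal(new Invoice(), fiWriter);
            fiWriter.close();
        }
    }
}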
Example #18
Source File: FastInfosetStreamWriterOutput.java From openjdk-jdk8u-backup with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out, NoEscapeHandler.theInstance);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example #19
Source File: FastInfosetStreamWriterOutput.java From jdk8u60 with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example #20
Source File: FastInfosetStreamWriterOutput.java From openjdk-jdk9 with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out, NoEscapeHandler.theInstance);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example #21
Source File: FastInfosetStreamWriterOutput.java From openjdk-8-source with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example #22
Source File: FastInfosetStreamWriterOutput.java From TencentKona-8 with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out, NoEscapeHandler.theInstance);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example #23
Source File: FastInfosetStreamWriterOutput.java From hottub with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example #24
Source File: FastInfosetStreamWriterOutput.java From openjdk-8 with GNU General Public License v2.0 | 5 votes |
public FastInfosetStreamWriterOutput(StAXDocumentSerializer out,
        JAXBContextImpl context) {
    super(out);

    this.fiout = out;
    this.localNames = context.getUTF8NameTable();

    final VocabularyApplicationData vocabAppData = fiout.getVocabularyApplicationData();
    AppData appData = null;
    if (vocabAppData == null || !(vocabAppData instanceof AppData)) {
        appData = new AppData();
        fiout.setVocabularyApplicationData(appData);
    } else {
        appData = (AppData) vocabAppData;
    }

    final TablesPerJAXBContext tablesPerContext = appData.contexts.get(context);
    if (tablesPerContext != null) {
        tables = tablesPerContext;
        /*
         * Obtain the current local name index. This will be used to
         * calculate the maximum index value when serializing for this context.
         */
        tables.clearOrResetTables(out.getLocalNameIndex());
    } else {
        tables = new TablesPerJAXBContext(context, out.getLocalNameIndex());
        appData.contexts.put(context, tables);
    }
}
Example #25
Source File: FastInfosetCodec.java From openjdk-8 with GNU General Public License v2.0 | 2 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out, boolean retainState) {
    return createNewStreamWriter(out, retainState,
            DEFAULT_INDEXED_STRING_SIZE_LIMIT, DEFAULT_INDEXED_STRING_MEMORY_LIMIT);
}
Example #26
Source File: FastInfosetCodec.java From openjdk-jdk9 with GNU General Public License v2.0 | 2 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out, boolean retainState) {
    return createNewStreamWriter(out, retainState,
            DEFAULT_INDEXED_STRING_SIZE_LIMIT, DEFAULT_INDEXED_STRING_MEMORY_LIMIT);
}
Example #27
Source File: FastInfosetCodec.java From openjdk-8-source with GNU General Public License v2.0 | 2 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out, boolean retainState) {
    return createNewStreamWriter(out, retainState,
            DEFAULT_INDEXED_STRING_SIZE_LIMIT, DEFAULT_INDEXED_STRING_MEMORY_LIMIT);
}
Example #28
Source File: FastInfosetCodec.java From hottub with GNU General Public License v2.0 | 2 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out, boolean retainState) {
    return createNewStreamWriter(out, retainState,
            DEFAULT_INDEXED_STRING_SIZE_LIMIT, DEFAULT_INDEXED_STRING_MEMORY_LIMIT);
}
Example #29
Source File: FastInfosetCodec.java From openjdk-jdk8u-backup with GNU General Public License v2.0 | 2 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out, boolean retainState) {
    return createNewStreamWriter(out, retainState,
            DEFAULT_INDEXED_STRING_SIZE_LIMIT, DEFAULT_INDEXED_STRING_MEMORY_LIMIT);
}
Example #30
Source File: FastInfosetCodec.java From openjdk-jdk8u with GNU General Public License v2.0 | 2 votes |
/**
 * Create a new {@link StAXDocumentSerializer} instance.
 *
 * @param out the OutputStream to serialize to.
 * @param retainState if true, the serializer should retain the state of
 *        vocabulary tables for multiple serializations.
 * @return a new {@link StAXDocumentSerializer} instance.
 */
/* package */ static StAXDocumentSerializer createNewStreamWriter(OutputStream out, boolean retainState) {
    return createNewStreamWriter(out, retainState,
            DEFAULT_INDEXED_STRING_SIZE_LIMIT, DEFAULT_INDEXED_STRING_MEMORY_LIMIT);
}