Java Code Examples for org.apache.hadoop.io.FloatWritable#set()
The following examples show how to use org.apache.hadoop.io.FloatWritable#set().
Each example notes its original project, source file, and license.
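FloatWritable is Hadoop's mutable wrapper around a Java float; set(float) overwrites the wrapped value in place so one instance can be reused across many records instead of allocating a new object per call. Before the project examples, here is a minimal, self-contained sketch of that pattern (not taken from any of the projects below):

import org.apache.hadoop.io.FloatWritable;

public class FloatWritableSetDemo {
    public static void main(String[] args) {
        // Reuse a single FloatWritable and update its value with set()
        FloatWritable fw = new FloatWritable();
        float[] scores = {0.25f, 0.5f, 0.75f};
        for (float score : scores) {
            fw.set(score);                  // overwrite the wrapped float in place
            System.out.println(fw.get());   // read it back with get()
        }
    }
}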
Example 1
Source File: VirtualProgressReporter.java From Cubert with Apache License 2.0
/**
 * Set the progress of the current task.
 *
 * Note: Works only when using a Virtual Input Format
 *
 * @param value value of the progress must lie within [0.0, 1.0]
 */
public static void setProgress(float value) {
    if (PhaseContext.isIntialized()) {
        final MapContext mapContext = PhaseContext.getMapContext();
        try {
            final FloatWritable progress = (FloatWritable) mapContext.getCurrentKey();
            progress.set(value);
            mapContext.nextKeyValue();
        } catch (Exception e) {
            System.err.println("Unable to report progress in Load Cyclic. Exception: " + e);
            e.printStackTrace();
        }
    }
}
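For context, a hypothetical call site might look like the sketch below; the mapper class name and the 0.5f progress value are illustrative assumptions, not part of the Cubert example above.

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

// Hypothetical mapper that reports progress via the static method shown above
public class ProgressReportingMapper extends Mapper<LongWritable, Text, Text, Text> {
    @Override
    protected void map(LongWritable key, Text value, Context context) {
        // ... process the record ...
        VirtualProgressReporter.setProgress(0.5f); // value must lie within [0.0, 1.0]
    }
}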
Example 2
Source File: FactorizationMachineUDTF.java From incubator-hivemall with Apache License 2.0
private void forwardAsIntFeature(@Nonnull final FactorizationMachineModel model,
        final int factors) throws HiveException {
    final IntWritable f_idx = new IntWritable(0);
    final FloatWritable f_Wi = new FloatWritable(0.f);
    final FloatWritable[] f_Vi = HiveUtils.newFloatArray(factors, 0.f);

    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = f_idx;
    forwardObjs[1] = f_Wi;
    forwardObjs[2] = null;

    // W0
    f_idx.set(0);
    f_Wi.set(model.getW0());
    // V0 is null
    forward(forwardObjs);

    // Wi, Vif (i starts from 1..P)
    forwardObjs[2] = Arrays.asList(f_Vi);

    for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
        final float[] vi = model.getV(i, false);
        if (vi == null) {
            continue;
        }
        f_idx.set(i);
        // set Wi
        final float w = model.getW(i);
        f_Wi.set(w);
        // set Vif
        for (int f = 0; f < factors; f++) {
            float v = vi[f];
            f_Vi[f].set(v);
        }
        forward(forwardObjs);
    }
}
Example 3
Source File: FactorizationMachineUDTF.java From incubator-hivemall with Apache License 2.0
private void forwardAsStringFeature(@Nonnull final FMStringFeatureMapModel model,
        final int factors) throws HiveException {
    final Text feature = new Text();
    final FloatWritable f_Wi = new FloatWritable(0.f);
    final FloatWritable[] f_Vi = HiveUtils.newFloatArray(factors, 0.f);

    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = feature;
    forwardObjs[1] = f_Wi;
    forwardObjs[2] = null;

    // W0
    feature.set("0");
    f_Wi.set(model.getW0());
    // V0 is null
    forward(forwardObjs);

    // Wi, Vif (i starts from 1..P)
    forwardObjs[2] = Arrays.asList(f_Vi);

    for (Map.Entry<String, Entry> e : Fastutil.fastIterable(model.getMap())) {
        String i = e.getKey();
        assert (i != null);
        // set i
        feature.set(i);
        Entry entry = e.getValue();
        // set Wi
        f_Wi.set(entry.W);
        // set Vif
        final float[] Vi = entry.Vf;
        for (int f = 0; f < factors; f++) {
            float v = Vi[f];
            f_Vi[f].set(v);
        }
        forward(forwardObjs);
    }
}
Example 4
Source File: ProbabilisticTopicModelBaseUDTF.java From incubator-hivemall with Apache License 2.0
protected void forwardModel() throws HiveException {
    final IntWritable topicIdx = new IntWritable();
    final Text word = new Text();
    final FloatWritable score = new FloatWritable();

    final Object[] forwardObjs = new Object[3];
    forwardObjs[0] = topicIdx;
    forwardObjs[1] = word;
    forwardObjs[2] = score;

    for (int k = 0; k < topics; k++) {
        topicIdx.set(k);

        final SortedMap<Float, List<String>> topicWords = model.getTopicWords(k);
        if (topicWords == null) {
            continue;
        }
        for (Map.Entry<Float, List<String>> e : topicWords.entrySet()) {
            score.set(e.getKey().floatValue());
            for (String v : e.getValue()) {
                word.set(v);
                forward(forwardObjs);
            }
        }
    }

    logger.info("Forwarded topic words each of " + topics + " topics");
}
Example 5
Source File: CrawlDbReader.java From anthelion with Apache License 2.0
public void reduce(FloatWritable key, Iterator<Text> values,
        OutputCollector<FloatWritable, Text> output, Reporter reporter) throws IOException {
    while (values.hasNext() && count < topN) {
        key.set(-key.get());
        output.collect(key, values.next());
        count++;
    }
}
Example 6
Source File: Step1.java From recsys-offline with Apache License 2.0
public void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
    VarLongWritable userID = new VarLongWritable();
    LongWritable itemID = new LongWritable();
    FloatWritable itemValue = new FloatWritable();
    System.err.println("key:" + key + " value:" + value + " ");
    String line = value.toString();
    String[] info = line.split(",");
    if (info.length != 3) {
        return;
    }
    userID.set(Long.parseLong(info[0]));
    itemID.set(Long.parseLong(info[1]));
    itemValue.set(Float.parseFloat(info[2]));
    context.write(userID, new LongAndFloat(itemID, itemValue));
}
Example 7
Source File: CrawlDbReader.java From nutch-htmlunit with Apache License 2.0
public void reduce(FloatWritable key, Iterator<Text> values,
        OutputCollector<FloatWritable, Text> output, Reporter reporter) throws IOException {
    while (values.hasNext() && count < topN) {
        key.set(-key.get());
        output.collect(key, values.next());
        count++;
    }
}
Example 8
Source File: CustomWritable.java From pxf with Apache License 2.0
@Override
public void write(DataOutput out) throws IOException {
    // 0. Timestamp
    Text tms_text = new Text(tms);
    tms_text.write(out);

    // 1. num, int1, int2
    IntWritable intw = new IntWritable();
    for (int i = 0; i < num.length; i++) {
        intw.set(num[i]);
        intw.write(out);
    }
    intw.set(int1);
    intw.write(out);
    intw.set(int2);
    intw.write(out);

    // 2. st1
    Text txt = new Text();
    for (int i = 0; i < strings.length; i++) {
        txt.set(strings[i]);
        txt.write(out);
    }
    txt.set(st1);
    txt.write(out);

    // 3. doubles
    DoubleWritable dw = new DoubleWritable();
    for (int i = 0; i < dubs.length; i++) {
        dw.set(dubs[i]);
        dw.write(out);
    }
    dw.set(db);
    dw.write(out);

    // 4. floats
    FloatWritable fw = new FloatWritable();
    for (int i = 0; i < fts.length; i++) {
        fw.set(fts[i]);
        fw.write(out);
    }
    fw.set(ft);
    fw.write(out);

    // 5. longs
    LongWritable lw = new LongWritable();
    for (int i = 0; i < lngs.length; i++) {
        lw.set(lngs[i]);
        lw.write(out);
    }
    lw.set(lng);
    lw.write(out);

    // 6. booleans
    BooleanWritable bw = new BooleanWritable();
    for (int i = 0; i < bools.length; ++i) {
        bw.set(bools[i]);
        bw.write(out);
    }
    bw.set(bool);
    bw.write(out);

    // 7. shorts
    ShortWritable sw = new ShortWritable();
    for (int i = 0; i < shrts.length; ++i) {
        sw.set(shrts[i]);
        sw.write(out);
    }
    sw.set(shrt);
    sw.write(out);

    // 8. bytes
    // BytesWritable btsw = new BytesWritable(bts);
    // btsw.write(out);
    BytesWritable btsw = new BytesWritable();
    btsw.setCapacity(bts.length);
    btsw.setSize(bts.length);
    btsw.set(bts, 0, bts.length);
    btsw.write(out);
}
Example 9
Source File: TestPipeApplication.java From hadoop with Apache License 2.0
@Override
public boolean next(FloatWritable key, NullWritable value) throws IOException {
    key.set(index++);
    return index <= 10;
}
Example 10
Source File: TestPipeApplication.java From big-c with Apache License 2.0
@Override
public boolean next(FloatWritable key, NullWritable value) throws IOException {
    key.set(index++);
    return index <= 10;
}
Example 11
Source File: OnlineMatrixFactorizationUDTF.java From incubator-hivemall with Apache License 2.0
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }
        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bu = new FloatWritable();
        final FloatWritable Bi = new FloatWritable();
        final Object[] forwardObj;
        if (updateMeanRating) {
            assert useBiasClause;
            float meanRating = model.getMeanRating();
            FloatWritable mu = new FloatWritable(meanRating);
            forwardObj = new Object[] {idx, Pu, Qi, Bu, Bi, mu};
        } else {
            if (useBiasClause) {
                forwardObj = new Object[] {idx, Pu, Qi, Bu, Bi};
            } else {
                forwardObj = new Object[] {idx, Pu, Qi};
            }
        }
        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            if (useBiasClause) {
                Bu.set(model.getUserBias(i));
                Bi.set(model.getItemBias(i));
            }
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        logger.info("Forwarded the prediction model of " + numForwarded
                + " rows. [totalErrors=" + cvState.getTotalErrors() + ", lastLosses="
                + cvState.getCumulativeLoss() + ", #trainingExamples=" + count + "]");
    }
}
Example 12
Source File: BPRMatrixFactorizationUDTF.java From incubator-hivemall with Apache License 2.0
@Override
public void close() throws HiveException {
    if (model != null) {
        if (count == 0) {
            this.model = null; // help GC
            return;
        }
        if (iterations > 1) {
            runIterativeTraining(iterations);
        }
        final IntWritable idx = new IntWritable();
        final FloatWritable[] Pu = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable[] Qi = HiveUtils.newFloatArray(factor, 0.f);
        final FloatWritable Bi = useBiasClause ? new FloatWritable() : null;
        final Object[] forwardObj = new Object[] {idx, Pu, Qi, Bi};
        int numForwarded = 0;
        for (int i = model.getMinIndex(), maxIdx = model.getMaxIndex(); i <= maxIdx; i++) {
            idx.set(i);
            Rating[] userRatings = model.getUserVector(i);
            if (userRatings == null) {
                forwardObj[1] = null;
            } else {
                forwardObj[1] = Pu;
                copyTo(userRatings, Pu);
            }
            Rating[] itemRatings = model.getItemVector(i);
            if (itemRatings == null) {
                forwardObj[2] = null;
            } else {
                forwardObj[2] = Qi;
                copyTo(itemRatings, Qi);
            }
            if (useBiasClause) {
                Bi.set(model.getItemBias(i));
            }
            forward(forwardObj);
            numForwarded++;
        }
        this.model = null; // help GC
        LOG.info("Forwarded the prediction model of " + numForwarded + " rows. [lastLosses="
                + cvState.getCumulativeLoss() + ", #trainingExamples=" + count + "]");
    }
}
Example 13
Source File: FieldAwareFactorizationMachineUDTF.java From incubator-hivemall with Apache License 2.0
@Override
protected void forwardModel() throws HiveException {
    this._model = null;
    this._fieldList = null;
    this._sumVfX = null;

    final int factors = _factors;
    final IntWritable idx = new IntWritable();
    final FloatWritable Wi = new FloatWritable(0.f);
    final FloatWritable[] Vi = HiveUtils.newFloatArray(factors, 0.f);
    final List<FloatWritable> ViObj = Arrays.asList(Vi);

    final Object[] forwardObjs = new Object[4];
    String modelId = HadoopUtils.getUniqueTaskIdString();
    forwardObjs[0] = new Text(modelId);
    forwardObjs[1] = idx;
    forwardObjs[2] = Wi;
    forwardObjs[3] = null; // Vi

    // W0
    idx.set(0);
    Wi.set(_ffmModel.getW0());
    forward(forwardObjs);

    final Entry entryW = new Entry(_ffmModel._buf, 1);
    final Entry entryV = new Entry(_ffmModel._buf, factors);
    final float[] Vf = new float[factors];

    for (Int2LongMap.Entry e : Fastutil.fastIterable(_ffmModel._map)) {
        // set i
        final int i = e.getIntKey();
        idx.set(i);

        final long offset = e.getLongValue();
        if (Entry.isEntryW(i)) { // set Wi
            entryW.setOffset(offset);
            float w = entryW.getW();
            if (w == 0.f) {
                continue; // skip w_i=0
            }
            Wi.set(w);
            forwardObjs[2] = Wi;
            forwardObjs[3] = null;
        } else { // set Vif
            entryV.setOffset(offset);
            entryV.getV(Vf);
            for (int f = 0; f < factors; f++) {
                Vi[f].set(Vf[f]);
            }
            forwardObjs[2] = null;
            forwardObjs[3] = ViObj;
        }
        forward(forwardObjs);
    }
}