/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package hivemall.classifier;

import hivemall.LearnerBaseUDTF;
import hivemall.model.FeatureValue;
import hivemall.model.IWeightValue;
import hivemall.model.PredictionModel;
import hivemall.model.PredictionResult;
import hivemall.model.WeightValue;
import hivemall.model.WeightValue.WeightValueWithCovar;
import hivemall.utils.collections.IMapIterator;
import hivemall.utils.hadoop.HiveUtils;

import java.util.ArrayList;
import java.util.List;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
import org.apache.hadoop.io.FloatWritable;

/**
 * Base class for binary online classifier UDTFs. This class handles argument
 * parsing, feature extraction, score computation, and forwarding the trained
 * model on close(); concrete subclasses supply the per-example weight update.
 */
public abstract class BinaryOnlineClassifierUDTF extends LearnerBaseUDTF {
    private static final Log logger = LogFactory.getLog(BinaryOnlineClassifierUDTF.class);

    private ListObjectInspector featureListOI;
    private PrimitiveObjectInspector labelOI;
    private boolean parseFeature;

    protected PredictionModel model;
    protected int count;

    @Override
    public StructObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
        if (argOIs.length < 2) {
            throw new UDFArgumentException(getClass().getSimpleName()
                    + " takes 2 or 3 arguments: List<Int|BigInt|Text> features, int label"
                    + " [, constant string options]");
        }
        PrimitiveObjectInspector featureInputOI = processFeaturesOI(argOIs[0]);
        this.labelOI = HiveUtils.asIntCompatibleOI(argOIs[1]);

        processOptions(argOIs);

        PrimitiveObjectInspector featureOutputOI = dense_model
                ? PrimitiveObjectInspectorFactory.javaIntObjectInspector : featureInputOI;
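        // With a dense model, features are addressed by int index, so the
        // forwarded "feature" column uses an int OI; otherwise the input
        // feature type is passed through unchanged.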
        this.model = createModel();
        if (preloadedModelFile != null) {
            loadPredictionModel(model, preloadedModelFile, featureOutputOI);
        }

        this.count = 0;
        return getReturnOI(featureOutputOI);
    }

    protected PrimitiveObjectInspector processFeaturesOI(ObjectInspector arg)
            throws UDFArgumentException {
        this.featureListOI = (ListObjectInspector) arg;
        ObjectInspector featureRawOI = featureListOI.getListElementObjectInspector();
        HiveUtils.validateFeatureOI(featureRawOI);
        // string features come as "feature:value" pairs and must be parsed
        this.parseFeature = HiveUtils.isStringOI(featureRawOI);
        return HiveUtils.asPrimitiveObjectInspector(featureRawOI);
    }

    protected StructObjectInspector getReturnOI(ObjectInspector featureRawOI) {
        ArrayList<String> fieldNames = new ArrayList<String>();
        ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();

        fieldNames.add("feature");
        ObjectInspector featureOI = ObjectInspectorUtils.getStandardObjectInspector(featureRawOI);
        fieldOIs.add(featureOI);
        fieldNames.add("weight");
        fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        if (useCovariance()) {
            fieldNames.add("covar");
            fieldOIs.add(PrimitiveObjectInspectorFactory.writableFloatObjectInspector);
        }

        return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
    }

    @Override
    public void process(Object[] args) throws HiveException {
        List<?> features = (List<?>) featureListOI.getList(args[0]);
        FeatureValue[] featureVector = parseFeatures(features);
        if (featureVector == null) {
            return;
        }
        int label = PrimitiveObjectInspectorUtils.getInt(args[1], labelOI);
        checkLabelValue(label);

        count++;
        train(featureVector, label);
    }

    @Nullable
    protected final FeatureValue[] parseFeatures(@Nonnull final List<?> features) {
        final int size = features.size();
        if (size == 0) {
            return null;
        }

        final ObjectInspector featureInspector = featureListOI.getListElementObjectInspector();
        final FeatureValue[] featureVector = new FeatureValue[size];
        for (int i = 0; i < size; i++) {
            Object f = features.get(i);
            if (f == null) {
                continue;
            }
            final FeatureValue fv;
            if (parseFeature) {
                fv = FeatureValue.parse(f);
            } else {
                // non-string features carry an implicit value of 1.0
                Object k = ObjectInspectorUtils.copyToStandardObject(f, featureInspector);
                fv = new FeatureValue(k, 1.f);
            }
            featureVector[i] = fv;
        }
        return featureVector;
    }

    protected void checkLabelValue(int label) throws UDFArgumentException {
        // an explicit check instead of an assert, so invalid labels are
        // rejected even when JVM assertions are disabled
        if (label != -1 && label != 0 && label != 1) {
            throw new UDFArgumentException(
                "Invalid label value: " + label + " (expected -1, 0, or 1)");
        }
    }

    void train(List<?> features, int label) {
        FeatureValue[] featureVector = parseFeatures(features);
        train(featureVector, label);
    }

    protected void train(@Nonnull final FeatureValue[] features, final int label) {
        final float y = label > 0 ? 1.f : -1.f;
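        // z = y * p is positive iff the sign of the raw score agrees with
        // the label; z <= 0 (misclassified or zero margin) triggers the
        // subclass-specific weight update.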
        final float p = predict(features);
        final float z = p * y;
        if (z <= 0.f) { // misclassified
            update(features, y, p);
        }
    }

    protected float predict(@Nonnull final FeatureValue[] features) {
        float score = 0.f;
        for (FeatureValue f : features) { // score += w[i] * x[i]
            if (f == null) {
                continue;
            }
            Object k = f.getFeature();
            float old_w = model.getWeight(k);
            if (old_w != 0.f) {
                float v = f.getValueAsFloat();
                score += (old_w * v);
            }
        }
        return score;
    }

    @Nonnull
    protected PredictionResult calcScoreAndNorm(@Nonnull final FeatureValue[] features) {
        float score = 0.f;
        float squared_norm = 0.f;

        for (FeatureValue f : features) { // score += w[i] * x[i]
            if (f == null) {
                continue;
            }
            final Object k = f.getFeature();
            final float v = f.getValueAsFloat();

            float old_w = model.getWeight(k);
            if (old_w != 0.f) {
                score += (old_w * v);
            }
            squared_norm += (v * v);
        }

        return new PredictionResult(score).squaredNorm(squared_norm);
    }

    @Nonnull
    protected PredictionResult calcScoreAndVariance(@Nonnull final FeatureValue[] features) {
        float score = 0.f;
        float variance = 0.f;

        for (FeatureValue f : features) { // score += w[i] * x[i]
            if (f == null) {
                continue;
            }
            final Object k = f.getFeature();
            final float v = f.getValueAsFloat();

            IWeightValue old_w = model.get(k);
            if (old_w == null) {
                variance += (1.f * v * v); // unit covariance for unseen features
            } else {
                score += (old_w.get() * v);
                variance += (old_w.getCovariance() * v * v);
            }
        }

        return new PredictionResult(score).variance(variance);
    }

    protected void update(@Nonnull final FeatureValue[] features, float y, float p) {
        throw new IllegalStateException("update() should not be called");
    }

    protected void update(@Nonnull final FeatureValue[] features, final float coeff) {
        for (FeatureValue f : features) { // w[f] += coeff * x[f]
            if (f == null) {
                continue;
            }
            final Object k = f.getFeature();
            final float v = f.getValueAsFloat();

            float old_w = model.getWeight(k);
            float new_w = old_w + (coeff * v);
            model.set(k, new WeightValue(new_w));
        }
    }

    @Override
    public final void close() throws HiveException {
        super.close();
        if (model != null) {
            int numForwarded = 0;
            if (useCovariance()) {
                final WeightValueWithCovar probe = new WeightValueWithCovar();
                final Object[] forwardMapObj = new Object[3];
                final FloatWritable fv = new FloatWritable();
                final FloatWritable cov = new FloatWritable();
                final IMapIterator<Object, IWeightValue> itor = model.entries();
                while (itor.next() != -1) {
                    itor.getValue(probe);
                    if (!probe.isTouched()) {
                        continue; // skip outputting untouched weights
                    }
                    Object k = itor.getKey();
                    fv.set(probe.get());
                    cov.set(probe.getCovariance());
                    forwardMapObj[0] = k;
                    forwardMapObj[1] = fv;
                    forwardMapObj[2] = cov;
                    forward(forwardMapObj);
                    numForwarded++;
                }
            } else {
                final WeightValue probe = new WeightValue();
                final Object[] forwardMapObj = new Object[2];
                final FloatWritable fv = new FloatWritable();
                final IMapIterator<Object, IWeightValue> itor = model.entries();
                while (itor.next() != -1) {
                    itor.getValue(probe);
                    if (!probe.isTouched()) {
                        continue; // skip outputting untouched weights
                    }
                    Object k = itor.getKey();
                    fv.set(probe.get());
                    forwardMapObj[0] = k;
                    forwardMapObj[1] = fv;
                    forward(forwardMapObj);
                    numForwarded++;
                }
            }
            long numMixed = model.getNumMixed();
            this.model = null;
            logger.info("Trained a prediction model using " + count + " training examples"
                    + (numMixed > 0 ? " (numMixed: " + numMixed + ")" : ""));
            logger.info("Forwarded the prediction model of " + numForwarded + " rows");
        }
    }
}
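/*
 * Illustrative sketch (not part of this file; the class name is hypothetical):
 * how a concrete subclass hooks into the train()/update() contract above.
 * A plain perceptron only needs the two-argument update with coeff = y,
 * i.e. w[f] += y * x[f] on every misclassified example.
 *
 *   public final class SimplePerceptronUDTF extends BinaryOnlineClassifierUDTF {
 *       @Override
 *       protected void update(FeatureValue[] features, float y, float p) {
 *           update(features, y); // perceptron rule: w[f] += y * x[f]
 *       }
 *   }
 */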