You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@phoenix.apache.org by suyash kharade <su...@gmail.com> on 2016/12/01 07:47:24 UTC
issue with phoenix udf
Hi,
I have just started using Phoenix. I have implemented a UDF and deployed
it in Phoenix. The following is the code snippet for the UDF:
import org.apache.phoenix.expression.function.ScalarFunction;
import org.apache.phoenix.parse.FunctionParseNode.Argument;
import org.apache.phoenix.parse.FunctionParseNode.BuiltInFunction;
import org.apache.phoenix.schema.tuple.Tuple;
import org.apache.phoenix.schema.types.PDataType;
import org.apache.phoenix.schema.types.PDouble;
import org.apache.phoenix.schema.types.PVarchar;
/**
 * Phoenix scalar UDF {@code CALDIST(startLat, startLong, 'lat,long')}.
 *
 * <p>Approximates the distance in kilometres between a fixed starting point
 * (arguments 1 and 2, expected to be VARCHAR literals) and a comma-separated
 * "latitude,longitude" VARCHAR (argument 3), using an equirectangular
 * approximation. Returns DOUBLE; unparseable input yields
 * {@link Double#MAX_VALUE} as an "infinitely far" sentinel.
 */
@BuiltInFunction(name = CalDistanceFunction.NAME, args = {
        @Argument(allowedTypes = { PVarchar.class }),
        @Argument(allowedTypes = { PVarchar.class }),
        @Argument(allowedTypes = { PVarchar.class }) })
public class CalDistanceFunction extends ScalarFunction {

    private static final Logger LOG = Logger.getLogger(CalDistanceFunction.class);

    public static final String NAME = "CALDIST";

    // Approximate kilometres per degree of latitude; reused for longitude
    // after scaling by cos(latitude).
    private static final double KM_PER_DEGREE = 111.699;

    // Rough degrees-to-radians divisor (180/pi ~= 57.2958) kept from the
    // original implementation so results are unchanged.
    private static final double DEGREES_TO_RADIANS = 57.3;

    // Starting point, cached from the first two literal arguments; may stay
    // null when those arguments are not literals (see init()).
    private String startLat;
    private String startLong;

    public CalDistanceFunction(List<Expression> children) {
        super(children);
        init();
    }

    /**
     * Caches the first two (literal) arguments as the starting
     * latitude/longitude. If either argument is not a literal, or its
     * literal value is null, the fields stay null and {@link #evaluate}
     * degrades to the sentinel result instead of throwing NPE.
     */
    private void init() {
        LOG.debug("init function, children: " + getChildren().size());
        Expression startLatExp = getChildren().get(0);
        Expression startLongExp = getChildren().get(1);
        if (startLatExp instanceof LiteralExpression
                && startLongExp instanceof LiteralExpression) {
            Object latValue = ((LiteralExpression) startLatExp).getValue();
            Object longValue = ((LiteralExpression) startLongExp).getValue();
            // getValue() can return null (e.g. a NULL literal); guard before
            // toString() — the original would have thrown NPE here.
            if (latValue != null && longValue != null) {
                startLat = latValue.toString();
                startLong = longValue.toString();
            }
        }
    }

    @Override
    public boolean evaluate(Tuple tuple, ImmutableBytesWritable ptr) {
        Expression latLongExpr = getChildren().get(2);
        if (!latLongExpr.evaluate(tuple, ptr)) {
            return false;
        }
        // FIX: decode ptr with the sort order of the expression that actually
        // produced it (child 2). The original used child 0's sort order,
        // which is wrong whenever the two differ.
        String latLongStr = (String) PVarchar.INSTANCE.toObject(ptr,
                latLongExpr.getSortOrder());
        if (latLongStr == null) {
            // SQL NULL in, NULL out.
            return false;
        }
        String[] parts = latLongStr.split(",");
        if (parts.length < 2) {
            // Original indexed splits[1] unconditionally and could throw
            // ArrayIndexOutOfBoundsException on input without a comma.
            LOG.info("malformed lat/long input: " + latLongStr);
            return false;
        }
        double latValue, longValue, startLatValue, startLongValue;
        try {
            // startLat/startLong may be null when arguments 0/1 were not
            // literals; parseDouble(null) throws NullPointerException, which
            // the NumberFormatException catch below would not have covered.
            if (startLat == null || startLong == null) {
                throw new NumberFormatException("start point not initialized");
            }
            latValue = Double.parseDouble(parts[0]);
            longValue = Double.parseDouble(parts[1]);
            startLatValue = Double.parseDouble(startLat);
            startLongValue = Double.parseDouble(startLong);
        } catch (NumberFormatException e) {
            // Preserve original behaviour: unparseable input yields a
            // sentinel "infinite" distance rather than an error.
            ptr.set(PDouble.INSTANCE.toBytes(Double.MAX_VALUE));
            return true;
        }
        // Equirectangular approximation; distance in kilometres.
        double dLatKm = KM_PER_DEGREE * (latValue - startLatValue);
        double dLongKm = KM_PER_DEGREE * (startLongValue - longValue)
                * Math.cos(latValue / DEGREES_TO_RADIANS);
        double answer = Math.sqrt(dLatKm * dLatKm + dLongKm * dLongKm);
        LOG.debug("distance calculated: " + answer);
        ptr.set(PDouble.INSTANCE.toBytes(answer));
        return true;
    }

    @Override
    public PDataType getDataType() {
        return PDouble.INSTANCE;
    }

    @Override
    public String getName() {
        return NAME;
    }
}
It is working fine with a normal select query:
select latitude, longitude, CALDIST('28.1','72.3',
latitude||','||longitude) as dist FROM LOCATION.USER_LOCATIONS_LATEST
but it throws an exception for a query with a where clause:
select latitude, longitude, CALDIST('28.1','72.3',
latitude||','||longitude) as dist FROM LOCATION.USER_LOCATIONS_LATEST where
CALDIST('28.1','72.3', latitude||','||longitude) > 700.0
and the exception is:
java.util.concurrent.ExecutionException:
org.apache.phoenix.schema.TypeMismatchException: ERROR 203 (22005): Type
mismatch. ERROR 203 (22005): Type mismatch. DOUBLE cannot be coerced to
VARCHAR
at java.util.concurrent.FutureTask.report(FutureTask.java:122)
at java.util.concurrent.FutureTask.get(FutureTask.java:192)
at
org.apache.phoenix.iterate.BaseResultIterators.close(BaseResultIterators.java:802)
at
org.apache.phoenix.iterate.BaseResultIterators.getIterators(BaseResultIterators.java:739)
at
org.apache.phoenix.iterate.BaseResultIterators.getIterators(BaseResultIterators.java:638)
at
org.apache.phoenix.iterate.RoundRobinResultIterator.getIterators(RoundRobinResultIterator.java:176)
at
org.apache.phoenix.iterate.RoundRobinResultIterator.next(RoundRobinResultIterator.java:91)
at org.apache.phoenix.jdbc.PhoenixResultSet.next(PhoenixRe
Thanks,
Suyash