You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@chukwa.apache.org by ey...@apache.org on 2011/01/23 22:00:50 UTC
svn commit: r1062531 - in /incubator/chukwa/trunk: ./
src/java/org/apache/hadoop/chukwa/datastore/
src/java/org/apache/hadoop/chukwa/hicc/
src/java/org/apache/hadoop/chukwa/hicc/rest/ src/web/hicc/jsp/
Author: eyang
Date: Sun Jan 23 21:00:49 2011
New Revision: 1062531
URL: http://svn.apache.org/viewvc?rev=1062531&view=rev
Log:
CHUKWA-569. Added temporary fix to load metadata faster for graph_explorer.jsp. (Eric Yang)
Modified:
incubator/chukwa/trunk/CHANGES.txt
incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/Chart.java
incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
incubator/chukwa/trunk/src/web/hicc/jsp/host_selector_dropdown.jsp
Modified: incubator/chukwa/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/CHANGES.txt?rev=1062531&r1=1062530&r2=1062531&view=diff
==============================================================================
--- incubator/chukwa/trunk/CHANGES.txt (original)
+++ incubator/chukwa/trunk/CHANGES.txt Sun Jan 23 21:00:49 2011
@@ -32,6 +32,8 @@ Trunk (unreleased changes)
IMPROVEMENTS
+ CHUKWA-569. Added temperory fix for load meta data faster for graph_explorer.jsp. (Eric Yang)
+
CHUKWA-573. Setup ChukwaMetrics to send to port 9097 and use ChukwaMetricsProcessor for parsing. (Eric Yang)
CHUKWA-572. Changed disk and network stats to gauge instead of counter. (Eric Yang)
Modified: incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java?rev=1062531&r1=1062530&r2=1062531&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java (original)
+++ incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/datastore/ChukwaHBaseStore.java Sun Jan 23 21:00:49 2011
@@ -70,7 +70,12 @@ public class ChukwaHBaseStore {
Result result = it.next();
String temp = new String(result.getCellValue().getValue());
double value = Double.parseDouble(temp);
- series.add(result.getCellValue().getTimestamp(), value);
+ // TODO: Pig Store function does not honor HBase timestamp, hence need to parse rowKey for timestamp.
+ String buf = new String(result.getRow());
+ Long timestamp = Long.parseLong(buf.split("-")[0]);
+ // If Pig Store function can honor HBase timestamp, use the following line is better.
+ // series.add(result.getCellValue().getTimestamp(), value);
+ series.add(timestamp, value);
}
results.close();
table.close();
@@ -110,22 +115,45 @@ public class ChukwaHBaseStore {
}
return tableNames;
}
+
+ public static void getColumnNamesHelper(Set<String>columnNames, Iterator<Result> it) {
+ Result result = it.next();
+ List<KeyValue> kvList = result.list();
+ for(KeyValue kv : kvList) {
+ columnNames.add(new String(kv.getColumn()));
+ }
+ }
- public static Set<String> getColumnNames(String tableName, String family, long startTime, long endTime) {
+ public static Set<String> getColumnNames(String tableName, String family, long startTime, long endTime, boolean fullScan) {
Set<String> columnNames = new CopyOnWriteArraySet<String>();
try {
HTable table = pool.getTable(tableName);
Scan scan = new Scan();
- scan.setTimeRange(startTime, endTime);
+ if(!fullScan) {
+ // Take sample columns of the starting time.
+ StringBuilder temp = new StringBuilder();
+ temp.append(startTime);
+ scan.setStartRow(temp.toString().getBytes());
+ temp.setLength(0);
+ temp.append(startTime+60000L);
+ scan.setStopRow(temp.toString().getBytes());
+ } else {
+ StringBuilder temp = new StringBuilder();
+ temp.append(startTime);
+ scan.setStartRow(temp.toString().getBytes());
+ temp.setLength(0);
+ temp.append(endTime);
+ scan.setStopRow(temp.toString().getBytes());
+ }
scan.addFamily(family.getBytes());
ResultScanner results = table.getScanner(scan);
Iterator<Result> it = results.iterator();
- while(it.hasNext()) {
- Result result = it.next();
- List<KeyValue> kvList = result.list();
- for(KeyValue kv : kvList) {
- columnNames.add(new String(kv.getColumn()));
- }
+ if(fullScan) {
+ while(it.hasNext()) {
+ getColumnNamesHelper(columnNames, it);
+ }
+ } else {
+ getColumnNamesHelper(columnNames, it);
}
results.close();
table.close();
@@ -135,13 +163,27 @@ public class ChukwaHBaseStore {
return columnNames;
}
- public static Set<String> getRowNames(String tableName, String column, long startTime, long endTime) {
+ public static Set<String> getRowNames(String tableName, String column, long startTime, long endTime, boolean fullScan) {
Set<String> rows = new HashSet<String>();
HTable table = pool.getTable(tableName);
try {
Scan scan = new Scan();
scan.addColumn(column.getBytes());
- scan.setTimeRange(startTime, endTime);
+ if(!fullScan) {
+ StringBuilder temp = new StringBuilder();
+ temp.append(startTime);
+ scan.setStartRow(temp.toString().getBytes());
+ temp.setLength(0);
+ temp.append(startTime+60000L);
+ scan.setStopRow(temp.toString().getBytes());
+ } else {
+ StringBuilder temp = new StringBuilder();
+ temp.append(startTime);
+ scan.setStartRow(temp.toString().getBytes());
+ temp.setLength(0);
+ temp.append(endTime);
+ scan.setStopRow(temp.toString().getBytes());
+ }
ResultScanner results = table.getScanner(scan);
Iterator<Result> it = results.iterator();
while(it.hasNext()) {
@@ -150,7 +192,7 @@ public class ChukwaHBaseStore {
String[] parts = buffer.split("-", 2);
if(!rows.contains(parts[1])) {
rows.add(parts[1]);
- }
+ }
}
results.close();
table.close();
@@ -160,8 +202,8 @@ public class ChukwaHBaseStore {
return rows;
}
- public static Set<String> getHostnames(String cluster, long startTime, long endTime) {
- return getRowNames("SystemMetrics","system:csource", startTime, endTime);
+ public static Set<String> getHostnames(String cluster, long startTime, long endTime, boolean fullScan) {
+ return getRowNames("SystemMetrics","system:csource", startTime, endTime, fullScan);
}
public static Set<String> getClusterNames(long startTime, long endTime) {
Modified: incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/Chart.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/Chart.java?rev=1062531&r1=1062530&r2=1062531&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/Chart.java (original)
+++ incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/Chart.java Sun Jan 23 21:00:49 2011
@@ -281,7 +281,8 @@ public class Chart {
output.append(" clickable: true,\n");
output.append(" hoverable: true,\n");
output.append(" tickColor: \"#C0C0C0\",\n");
- output.append(" backgroundColor:\"#FFFFFF\"\n");
+ output.append(" borderWidth: 0,\n");
+ output.append(" backgroundColor:\"#F9F9F9\"\n");
output.append(" },\n");
output.append(" legend: { show: " + this.legend
+ ", noColumns: 3, container: $(\"#placeholderLegend\") },\n");
@@ -370,15 +371,15 @@ public class Chart {
if ((counter != 0) || (i != 0)) {
output.append(",");
}
- String param = "fill: false";
+ String param = "fill: false, lineWidth: 1";
String type = "lines";
if (this.chartType.get(i).intern() == "stack-area".intern()
|| this.chartType.get(i).intern() == "area".intern()) {
- param = "fill: true";
+ param = "fill: true, lineWidth: 0";
}
if (this.chartType.get(i).intern() == "bar".intern()) {
type = "bars";
- param = "stepByStep: true";
+ param = "stepByStep: true, lineWidth: 0";
}
if (this.chartType.get(i).intern() == "point".intern()) {
type = "points";
Modified: incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java?rev=1062531&r1=1062530&r2=1062531&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java (original)
+++ incubator/chukwa/trunk/src/java/org/apache/hadoop/chukwa/hicc/rest/MetricsController.java Sun Jan 23 21:00:49 2011
@@ -7,6 +7,7 @@ import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
+import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@@ -130,7 +131,7 @@ public class MetricsController {
@GET
@Path("schema/{table}/{family}")
@Produces("application/json")
- public String getColumnNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @QueryParam("start") String start, @QueryParam("end") String end) {
+ public String getColumnNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @QueryParam("start") String start, @QueryParam("end") String end, @DefaultValue("false") @QueryParam("fullScan") boolean fullScan) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
String buffer = "";
Series series;
@@ -152,7 +153,7 @@ public class MetricsController {
throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
.entity("Start/End date parse error. Format: yyyyMMddHHmmss.").build());
}
- Set<String> columnNames = ChukwaHBaseStore.getColumnNames(tableName, family, startTime, endTime);
+ Set<String> columnNames = ChukwaHBaseStore.getColumnNames(tableName, family, startTime, endTime, fullScan);
JSONArray columns = new JSONArray();
for(String column : columnNames) {
columns.add(column);
@@ -163,7 +164,7 @@ public class MetricsController {
@GET
@Path("rowkey/{table}/{column}")
@Produces("application/json")
- public String getRowNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @PathParam("column") String column, @QueryParam("start") String start, @QueryParam("end") String end) {
+ public String getRowNames(@Context HttpServletRequest request, @PathParam("table") String tableName, @PathParam("family") String family, @PathParam("column") String column, @QueryParam("start") String start, @QueryParam("end") String end, @QueryParam("fullScan") @DefaultValue("false") boolean fullScan) {
SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMddHHmmss");
String buffer = "";
Series series;
@@ -185,7 +186,7 @@ public class MetricsController {
throw new WebApplicationException(Response.status(Response.Status.BAD_REQUEST)
.entity("Start/End date parse error. Format: yyyyMMddHHmmss.").build());
}
- Set<String> columnNames = ChukwaHBaseStore.getRowNames(tableName, column, startTime, endTime);
+ Set<String> columnNames = ChukwaHBaseStore.getRowNames(tableName, column, startTime, endTime, fullScan);
JSONArray rows = new JSONArray();
for(String row : columnNames) {
rows.add(row);
Modified: incubator/chukwa/trunk/src/web/hicc/jsp/host_selector_dropdown.jsp
URL: http://svn.apache.org/viewvc/incubator/chukwa/trunk/src/web/hicc/jsp/host_selector_dropdown.jsp?rev=1062531&r1=1062530&r2=1062531&view=diff
==============================================================================
--- incubator/chukwa/trunk/src/web/hicc/jsp/host_selector_dropdown.jsp (original)
+++ incubator/chukwa/trunk/src/web/hicc/jsp/host_selector_dropdown.jsp Sun Jan 23 21:00:49 2011
@@ -72,7 +72,7 @@
}
} catch (NullPointerException e) {
}
- Set<String> machines = ChukwaHBaseStore.getHostnames(cluster, time.getStartTime(), time.getEndTime());
+ Set<String> machines = ChukwaHBaseStore.getHostnames(cluster, time.getStartTime(), time.getEndTime(), false);
for(String machine : machines) {
if(hosts.containsKey(machine)) {
out.println("<option selected>"+machine+"</option>");