
pinpoint-apm#472 change hbase 1.0 api
emeroad committed May 27, 2015
1 parent 55b8b28 commit 2325a7b
Showing 32 changed files with 150 additions and 271 deletions.
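Every hunk below makes the same mechanical substitution: HBase 1.0 deprecated `Put.add(family, qualifier, ...)` and `Delete.deleteColumns(...)` in favor of `Put.addColumn(...)` and `Delete.addColumn(...)`, and replaced the `KeyValue[]` returned by `Result.raw()` with the `Cell[]` from `Result.rawCells()`, read through `CellUtil`. A minimal sketch of the write-side change, with illustrative family/qualifier names that are not from the Pinpoint codebase:

```java
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class PutMigrationSketch {
    public static Put buildPut() {
        byte[] family = Bytes.toBytes("cf");      // illustrative column family
        byte[] qualifier = Bytes.toBytes("q");    // illustrative qualifier
        byte[] value = Bytes.toBytes("v");
        long timestamp = System.currentTimeMillis();

        Put put = new Put(Bytes.toBytes("rowKey"));

        // Pre-1.0 API, deprecated in HBase 1.0:
        //   put.add(family, qualifier, value);
        //   put.add(family, qualifier, timestamp, value);

        // HBase 1.0 replacements; same arguments, same semantics:
        put.addColumn(family, qualifier, value);
        put.addColumn(family, qualifier, timestamp, value);
        return put;
    }
}
```

The `addColumn` overloads keep the old `add` semantics; only the method name changes, which is why the diff below is one-line swaps across 32 files.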
@@ -60,7 +60,7 @@ public void insert(String agentId, String applicationName) {
byte[] appNameByte = Bytes.toBytes(applicationName);

Put put = new Put(agentIdByte);
-put.add(AGENTID_APPLICATION_INDEX_CF_APPLICATION, appNameByte, appNameByte);
+put.addColumn(AGENTID_APPLICATION_INDEX_CF_APPLICATION, appNameByte, appNameByte);

hbaseTemplate.put(AGENTID_APPLICATION_INDEX, put);
}
@@ -72,12 +72,12 @@ public void insert(TAgentInfo agentInfo) {
// should add additional agent informations. for now added only starttime for sqlMetaData
AgentInfoBo agentInfoBo = this.agentInfoBoMapper.map(agentInfo);
byte[] agentInfoBoValue = agentInfoBo.writeValue();
-put.add(HBaseTables.AGENTINFO_CF_INFO, HBaseTables.AGENTINFO_CF_INFO_IDENTIFIER, agentInfoBoValue);
+put.addColumn(HBaseTables.AGENTINFO_CF_INFO, HBaseTables.AGENTINFO_CF_INFO_IDENTIFIER, agentInfoBoValue);

if (agentInfo.isSetServerMetaData()) {
ServerMetaDataBo serverMetaDataBo = this.serverMetaDataBoMapper.map(agentInfo.getServerMetaData());
byte[] serverMetaDataBoValue = serverMetaDataBo.writeValue();
-put.add(HBaseTables.AGENTINFO_CF_INFO, HBaseTables.AGENTINFO_CF_INFO_SERVER_META_DATA, serverMetaDataBoValue);
+put.addColumn(HBaseTables.AGENTINFO_CF_INFO, HBaseTables.AGENTINFO_CF_INFO_SERVER_META_DATA, serverMetaDataBoValue);
}

hbaseTemplate.put(HBaseTables.AGENTINFO, put);
@@ -68,10 +68,10 @@ public void insert(final TAgentStat agentStat) {
Put put = new Put(key);

final AgentStatMemoryGcBo agentStatMemoryGcBo = this.agentStatMemoryGcBoMapper.map(agentStat);
-put.add(AGENT_STAT_CF_STATISTICS, AGENT_STAT_CF_STATISTICS_MEMORY_GC, timestamp, agentStatMemoryGcBo.writeValue());
+put.addColumn(AGENT_STAT_CF_STATISTICS, AGENT_STAT_CF_STATISTICS_MEMORY_GC, timestamp, agentStatMemoryGcBo.writeValue());

final AgentStatCpuLoadBo agentStatCpuLoadBo = this.agentStatCpuLoadBoMapper.map(agentStat);
-put.add(AGENT_STAT_CF_STATISTICS, AGENT_STAT_CF_STATISTICS_CPU_LOAD, timestamp, agentStatCpuLoadBo.writeValue());
+put.addColumn(AGENT_STAT_CF_STATISTICS, AGENT_STAT_CF_STATISTICS_CPU_LOAD, timestamp, agentStatCpuLoadBo.writeValue());

hbaseTemplate.put(AGENT_STAT, put);
}
@@ -74,7 +74,7 @@ public void insert(TApiMetaData apiMetaData) {
}

final byte[] apiMetaDataBytes = buffer.getBuffer();
-put.add(HBaseTables.API_METADATA_CF_API, apiMetaDataBytes, null);
+put.addColumn(HBaseTables.API_METADATA_CF_API, apiMetaDataBytes, null);

hbaseTemplate.put(HBaseTables.API_METADATA, put);
}
@@ -53,7 +53,7 @@ public void insert(final TAgentInfo agentInfo) {
byte[] qualifier = Bytes.toBytes(agentInfo.getAgentId());
byte[] value = Bytes.toBytes(agentInfo.getServiceType());

-put.add(APPLICATION_INDEX_CF_AGENTS, qualifier, value);
+put.addColumn(APPLICATION_INDEX_CF_AGENTS, qualifier, value);

hbaseTemplate.put(APPLICATION_INDEX, put);

@@ -67,7 +67,7 @@ public void insert(final TSpan span) {
final byte[] distributedKey = crateRowKey(span, acceptedTime);
Put put = new Put(distributedKey);

-put.add(APPLICATION_TRACE_INDEX_CF_TRACE, makeQualifier(span) , acceptedTime, value);
+put.addColumn(APPLICATION_TRACE_INDEX_CF_TRACE, makeQualifier(span) , acceptedTime, value);

hbaseTemplate.put(APPLICATION_TRACE_INDEX, put);
}
@@ -64,7 +64,7 @@ public void insert(TSqlMetaData sqlMetaData) {
byte[] sqlBytes = Bytes.toBytes(sql);

// added sqlBytes into qualifier intentionally not to conflict hashcode
-put.add(HBaseTables.SQL_METADATA_CF_SQL, sqlBytes, null);
+put.addColumn(HBaseTables.SQL_METADATA_CF_SQL, sqlBytes, null);

hbaseTemplate.put(HBaseTables.SQL_METADATA, put);
}
@@ -63,7 +63,7 @@ public void insert(TStringMetaData stringMetaData) {
String stringValue = stringMetaData.getStringValue();
byte[] sqlBytes = Bytes.toBytes(stringValue);
// added sqlBytes into qualifier intentionally not to conflict hashcode
-put.add(HBaseTables.STRING_METADATA_CF_STR, sqlBytes, null);
+put.addColumn(HBaseTables.STRING_METADATA_CF_STR, sqlBytes, null);

hbaseTemplate.put(HBaseTables.STRING_METADATA, put);
}
@@ -82,12 +82,12 @@ public void insert(final TSpan span) {
byte[] spanId = Bytes.toBytes(spanBo.getSpanId());

long acceptedTime = acceptedTimeService.getAcceptedTime();
-put.add(TRACES_CF_SPAN, spanId, acceptedTime, spanValue);
+put.addColumn(TRACES_CF_SPAN, spanId, acceptedTime, spanValue);

List<TAnnotation> annotations = span.getAnnotations();
if (CollectionUtils.isNotEmpty(annotations)) {
byte[] bytes = writeAnnotation(annotations);
-put.add(TRACES_CF_ANNOTATION, spanId, bytes);
+put.addColumn(TRACES_CF_ANNOTATION, spanId, bytes);
}

addNestedSpanEvent(put, span);
@@ -111,7 +111,7 @@ private void addNestedSpanEvent(Put put, TSpan span) {
SpanEventBo spanEventBo = new SpanEventBo(span, spanEvent);
byte[] rowId = BytesUtils.add(spanEventBo.getSpanId(), spanEventBo.getSequence(), spanEventBo.getAsyncId());
byte[] value = spanEventBo.writeValue();
-put.add(TRACES_CF_TERMINALSPAN, rowId, acceptedTime0, value);
+put.addColumn(TRACES_CF_TERMINALSPAN, rowId, acceptedTime0, value);
}
}

@@ -130,7 +130,7 @@ public void insertSpanChunk(TSpanChunk spanChunk) {
byte[] value = spanEventBo.writeValue();
byte[] rowId = BytesUtils.add(spanEventBo.getSpanId(), spanEventBo.getSequence(), spanEventBo.getAsyncId());

-put.add(TRACES_CF_TERMINALSPAN, rowId, acceptedTime, value);
+put.addColumn(TRACES_CF_TERMINALSPAN, rowId, acceptedTime, value);
}
hbaseTemplate.put(TRACES, put);

@@ -18,7 +18,8 @@

import com.navercorp.pinpoint.common.util.BytesUtils;

-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.springframework.data.hadoop.hbase.RowMapper;
import org.springframework.stereotype.Component;
@@ -31,17 +32,17 @@ public String mapRow(Result result, int rowNum) throws Exception {
if (result.isEmpty()) {
return null;
}
-KeyValue[] raw = result.raw();
+Cell[] rawCells = result.rawCells();

-if (raw.length == 0) {
+if (rawCells.length == 0) {
return null;
}

-String[] ret = new String[raw.length];
+String[] ret = new String[rawCells.length];
int index = 0;

-for (KeyValue kv : raw) {
-ret[index++] = BytesUtils.toString(kv.getQualifier());
+for (Cell cell : rawCells) {
+ret[index++] = BytesUtils.toString(CellUtil.cloneQualifier(cell));
}

return ret[0];
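The read-side hunks, above and in the mapper files further down, make the matching swap: `Result.raw()` and `KeyValue` accessors such as `getQualifier()` give way to `Result.rawCells()` and the `CellUtil.cloneQualifier`/`cloneRow`/`cloneValue` helpers, which copy the requested component out of the cell's backing array. A hedged sketch of the pattern, assuming a `Result` from any HBase 1.0 `Get` or `Scan`:

```java
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class CellReadSketch {
    // Collect every qualifier in a Result, HBase 1.0 style.
    public static String[] qualifiers(Result result) {
        if (result.isEmpty()) {
            return new String[0];
        }
        Cell[] cells = result.rawCells();          // replaces result.raw()
        String[] names = new String[cells.length];
        for (int i = 0; i < cells.length; i++) {
            // cloneQualifier copies the qualifier bytes out of the cell's
            // backing array; it replaces KeyValue.getQualifier().
            names[i] = Bytes.toString(CellUtil.cloneQualifier(cells[i]));
        }
        return names;
    }
}
```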
@@ -307,9 +307,9 @@ public Object doInTable(HTableInterface htable) throws Throwable {
Put put = new Put(rowName);
if (familyName != null) {
if (timestamp == null) {
-put.add(familyName, qualifier, value);
+put.addColumn(familyName, qualifier, value);
} else {
-put.add(familyName, qualifier, timestamp, value);
+put.addColumn(familyName, qualifier, timestamp, value);
}
}
htable.put(put);

This file was deleted.

@@ -95,7 +95,7 @@ public void deleteAgentId(String applicationName, String agentId) {
byte[] rowKey = Bytes.toBytes(applicationName);
Delete delete = new Delete(rowKey);
byte[] qualifier = Bytes.toBytes(agentId);
-delete.deleteColumns(HBaseTables.APPLICATION_INDEX_CF_AGENTS, qualifier);
+delete.addColumn(HBaseTables.APPLICATION_INDEX_CF_AGENTS, qualifier);
hbaseOperations2.delete(HBaseTables.APPLICATION_INDEX, delete);
}
}
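One subtlety in the hunk above: in the HBase 1.0 `Delete` API, `addColumn(family, qualifier)` marks only the most recent version of the column for deletion, while `addColumns(family, qualifier)` is the direct replacement for the deprecated `deleteColumns` and removes all versions. A sketch of the distinction, with illustrative row and column names:

```java
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteMigrationSketch {
    public static Delete buildDelete() {
        byte[] family = Bytes.toBytes("cf");    // illustrative names
        byte[] qualifier = Bytes.toBytes("q");

        Delete delete = new Delete(Bytes.toBytes("rowKey"));

        // Deletes only the latest version of the column:
        delete.addColumn(family, qualifier);

        // Deletes all versions; this is the 1.0 equivalent of the
        // deprecated deleteColumns(family, qualifier):
        delete.addColumns(family, qualifier);
        return delete;
    }
}
```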
@@ -19,7 +19,8 @@
import java.util.ArrayList;
import java.util.List;

-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
@@ -179,14 +180,15 @@ public void handleLastResult(Result lastResult) {
if (lastResult == null) {
return;
}
-KeyValue[] keyValueArray = lastResult.raw();
-KeyValue last = keyValueArray[keyValueArray.length - 1];
-byte[] row = last.getRow();
+
+Cell[] rawCells = lastResult.rawCells();
+Cell last = rawCells[rawCells.length - 1];
+byte[] row = CellUtil.cloneRow(last);
byte[] originalRow = traceIdRowKeyDistributor.getOriginalKey(row);
long reverseStartTime = BytesUtils.bytesToLong(originalRow, PinpointConstants.APPLICATION_NAME_MAX_LEN);
this.lastRowTimestamp = TimeUtils.recoveryTimeMillis(reverseStartTime);

-byte[] qualifier = last.getQualifier();
+byte[] qualifier = CellUtil.cloneQualifier(last);
this.lastTransactionId = TransactionIdMapper.parseVarTransactionId(qualifier, 0);
this.lastTransactionElapsed = BytesUtils.bytesToInt(qualifier, 0);

@@ -53,7 +53,7 @@
@Repository
public class HbaseHostApplicationMapDao implements HostApplicationMapDao {

-private Logger logger = LoggerFactory.getLogger(this.getClass());
+private final Logger logger = LoggerFactory.getLogger(this.getClass());
private int scanCacheSize = 10;

@Autowired
@@ -45,7 +45,7 @@
@Repository
public class HbaseMapStatisticsCallerDao implements MapStatisticsCallerDao {

-private Logger logger = LoggerFactory.getLogger(this.getClass());
+private final Logger logger = LoggerFactory.getLogger(this.getClass());
private int scanCacheSize = 40;

@Autowired
@@ -16,7 +16,8 @@

package com.navercorp.pinpoint.web.mapper;

-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.springframework.data.hadoop.hbase.RowMapper;
@@ -37,11 +38,11 @@ public List<String> mapRow(Result result, int rowNum) throws Exception {
if (result.isEmpty()) {
return Collections.emptyList();
}
-final KeyValue[] raw = result.raw();
-final List<String> agentIdList = new ArrayList<String>(raw.length);
+final Cell[] rawCells = result.rawCells();
+final List<String> agentIdList = new ArrayList<String>(rawCells.length);

-for (KeyValue kv : raw) {
-final String agentId = Bytes.toString(kv.getQualifier());
+for (Cell cell : rawCells) {
+final String agentId = Bytes.toString(CellUtil.cloneQualifier(cell));
agentIdList.add(agentId);
}

@@ -22,7 +22,8 @@
import com.navercorp.pinpoint.common.util.BytesUtils;
import com.navercorp.pinpoint.common.util.TimeUtils;

-import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;
import org.slf4j.Logger;
@@ -51,25 +52,25 @@ public List<AgentInfoBo> mapRow(Result result, int rowNum) throws Exception {
if (result.isEmpty()) {
return Collections.emptyList();
}
-KeyValue[] raw = result.raw();
+Cell[] rawCells = result.rawCells();

-List<AgentInfoBo> agentInfoBoList = new ArrayList<AgentInfoBo>(raw.length);
-for (KeyValue keyValue : raw) {
-AgentInfoBo agentInfoBo = mappingAgentInfo(keyValue);
+List<AgentInfoBo> agentInfoBoList = new ArrayList<AgentInfoBo>(rawCells.length);
+for (Cell cell : rawCells) {
+AgentInfoBo agentInfoBo = mappingAgentInfo(cell);

agentInfoBoList.add(agentInfoBo);
}

return agentInfoBoList;
}

-private AgentInfoBo mappingAgentInfo(KeyValue keyValue) {
-byte[] rowKey = keyValue.getRow();
+private AgentInfoBo mappingAgentInfo(Cell cell) {
+byte[] rowKey = CellUtil.cloneRow(cell);
String agentId = Bytes.toString(rowKey, 0, PinpointConstants.AGENT_NAME_MAX_LEN - 1).trim();
long reverseStartTime = BytesUtils.bytesToLong(rowKey, PinpointConstants.AGENT_NAME_MAX_LEN);
long startTime = TimeUtils.recoveryTimeMillis(reverseStartTime);

-final AgentInfoBo.Builder builder = new AgentInfoBo.Builder(keyValue.getValue());
+final AgentInfoBo.Builder builder = new AgentInfoBo.Builder(CellUtil.cloneValue(cell));
builder.setAgentId(agentId);
builder.setStartTime(startTime);
// TODO fix