/**
 * Determines a blogger's posting-habit time slot by querying the search index
 * selected by {@code type} and aggregating publication times.
 *
 * @param bloggerId blogger identifier used in the term query
 * @param type      index selector: 1 -> "AAAAAAAAA", 2 -> "BBBBBBBBBB";
 *                  must not be null (the {@code type == 1} comparison unboxes it)
 * @param stime     range start (inclusive, GTE) for "pubtime"; the filter is
 *                  skipped unless both bounds are non-empty
 * @param etime     range end (exclusive, LT) for "pubtime"
 * @return the time-slot key produced by the aggregation, or "" on failure
 */
public String getHabits(String bloggerId, Integer type, String stime, String etime) {
String result = "";
String index = "";
if (type == 1) {
index = "AAAAAAAAA";
} else if (type == 2) {
index = "BBBBBBBBBB";
}
String[] types = new String[]{index};
SearchClient client = null;
try {
client = searchIndexer.getSearchClient(null, types);
// NOTE(review): both branches add an identical "tw_id" term query; the
// commented-out line suggests type == 1 may have been intended to query
// "fb_id" instead — confirm against the index schema before changing.
if (type == 1) {
// client.addPrimitiveTermQuery("fb_id", bloggerId, ISIOperator.MUST);
client.addPrimitiveTermQuery("tw_id", bloggerId, ISIOperator.MUST);
} else if (type == 2) {
client.addPrimitiveTermQuery("tw_id", bloggerId, ISIOperator.MUST);
}
// Restrict to [stime, etime) only when both bounds are supplied.
if (StringUtils.isNotEmpty(stime) && StringUtils.isNotEmpty(etime)) {
client.addRangeQuery("pubtime", stime, RangeCommon.GTE, etime, RangeCommon.LT, ISIOperator.MUST);
}
result = client.addPubtimeAgg();
} catch (Exception e) {
// TODO(review): replace with a proper logger; printStackTrace loses
// context in production. Kept to preserve the best-effort behavior
// (callers receive "" on failure rather than an exception).
e.printStackTrace();
} finally {
if (client != null) {
client.close();
}
}
return result;
}
/**
 * Runs an hourly date-histogram aggregation on the "pubtime" field and returns
 * the daily time slot ("0-8", "8-12", "12-18" or "18-24") containing the most
 * documents.
 *
 * @return the key of the busiest time slot
 */
public String addPubtimeAgg() {
// Hourly histogram; minDocCount(0) keeps empty hours so bucket parsing is uniform.
DateHistogramBuilder dateHistogramBuilder = AggregationBuilders.dateHistogram("pubtime_histogram")
        .field("pubtime").interval(DateHistogram.Interval.HOUR)
        .minDocCount(0L);
searchbuilder.addAggregation(dateHistogramBuilder);
if (this.query != null) {
    this.searchbuilder.setQuery(this.query);
}
SearchResponse sr = searchbuilder.execute().actionGet();
// Fold the hourly buckets into four coarse slots, then pick the busiest one.
Histogram pubtimeHistogram = sr.getAggregations().get("pubtime_histogram");
Map<String, Long> slotCounts = new HashMap<>();
slotCounts.put("0-8", getBucketCount(pubtimeHistogram, 0, 8));
slotCounts.put("8-12", getBucketCount(pubtimeHistogram, 8, 12));
slotCounts.put("12-18", getBucketCount(pubtimeHistogram, 12, 18));
slotCounts.put("18-24", getBucketCount(pubtimeHistogram, 18, 24));
return slotCounts.entrySet().stream()
        .max(Map.Entry.comparingByValue())
        // slotCounts always holds four entries, so max() cannot be empty;
        // orElseThrow documents that invariant instead of an unchecked get().
        .orElseThrow(IllegalStateException::new)
        .getKey();
}
/**
 * Sums the document counts of histogram buckets whose hour-of-day falls in
 * the half-open range [startHour, endHour).
 *
 * @param histogram hourly date-histogram aggregation result
 * @param startHour inclusive lower hour bound
 * @param endHour   exclusive upper hour bound
 * @return total document count across the matching buckets
 */
private static long getBucketCount(Histogram histogram, int startHour, int endHour) {
long count = 0;
for (Histogram.Bucket bucket : histogram.getBuckets()) {
String hourStr = bucket.getKey();
// Primitive int avoids the accidental autoboxing of the original Integer local.
// NOTE(review): assumes TimeUtil.getHour never returns null — the original
// code would have thrown an unboxing NPE in that case too; confirm.
int hour = TimeUtil.getHour(hourStr);
if (hour >= startHour && hour < endHour) {
count += bucket.getDocCount();
}
}
return count;
}