/**
 * Returns the accounts in the given group that attended more than
 * {@code Constants.limit_Number_Lessons_PerDay} distinct courses on at least one day.
 *
 * <p>Results are cached in Redis (as a JSON array) for one day under
 * {@code groupId + OVERTIME_USER_ID}; a cache hit short-circuits the computation.
 *
 * @param groupId group whose members are analysed; must not be {@code null}
 * @return distinct list of offending account ids (possibly empty), never {@code null}
 * @throws SelfException if the analysis fails or the calling thread is interrupted
 */
public List<String> getOvertimePeople(Long groupId) {
    String cacheKey = groupId + OVERTIME_USER_ID;
    String redisUserIds = this.redisTemplate.opsForValue().get(cacheKey);
    if (!StringUtils.isEmpty(redisUserIds)) {
        // Cache hit: the value is stored as a JSON array (see write below), parse it back.
        return JSON.parseArray(redisUserIds, String.class);
    }
    List<Map<String, Object>> overTimeData = studentInfoVoRepository.findOverTimeData(groupId);
    List<String> accounts = studentInfoVoRepository.findUserIds(groupId);
    // Synchronized: worker tasks on multiple pool threads append to this list concurrently.
    List<String> resultUserIds = Collections.synchronizedList(new ArrayList<>());
    try {
        List<AccountDate> accountDates =
                RainMUtils.convertListMapToListBean(overTimeData, AccountDate.class);
        // Distinct exit dates seen anywhere in the data set.
        List<String> setDates = accountDates.stream()
                .map(AccountDate::getExitDate)
                .distinct()
                .collect(Collectors.toList());
        // Index rows by account once, instead of re-filtering the whole list per account.
        Map<String, List<AccountDate>> rowsByAccount = accountDates.stream()
                .collect(Collectors.groupingBy(AccountDate::getAccount));
        List<List<String>> partitionAccount = Lists.partition(accounts, 50);
        // Unbounded queue + CallerRunsPolicy: no partition is ever silently discarded
        // (the original bounded queue + DiscardPolicy dropped work under load).
        ThreadPoolExecutor threadPoolExecutor = new ThreadPoolExecutor(50, 50, 0L,
                TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(),
                new ThreadPoolExecutor.CallerRunsPolicy());
        List<Future<?>> futures = new ArrayList<>(partitionAccount.size());
        for (List<String> accountChunk : partitionAccount) {
            futures.add(threadPoolExecutor.submit(() -> {
                for (String account : accountChunk) {
                    List<AccountDate> rows =
                            rowsByAccount.getOrDefault(account, Collections.emptyList());
                    for (String date : setDates) {
                        long distinctCourses = rows.stream()
                                .filter(row -> date.equals(row.getExitDate()))
                                .map(AccountDate::getCourseId)
                                .distinct()
                                .count();
                        if (Constants.limit_Number_Lessons_PerDay < distinctCourses) {
                            resultUserIds.add(account);
                        }
                    }
                }
            }));
        }
        threadPoolExecutor.shutdown();
        // Surfacing task failures: Future.get() rethrows any exception a worker raised,
        // instead of silently losing it inside the pool.
        for (Future<?> future : futures) {
            future.get();
        }
        List<String> accountList = resultUserIds.stream()
                .distinct()
                .collect(Collectors.toList());
        // Store valid JSON so the cache-hit branch above can parse it back
        // (the original cached List.toString(), which JSON.parseArray cannot read).
        this.redisTemplate.opsForValue()
                .set(cacheKey, JSON.toJSONString(accountList), 1, TimeUnit.DAYS);
        return accountList;
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        throw new SelfException("数据统计错误", ResultCode.BAD);
    } catch (Exception e) {
        throw new SelfException("数据统计错误", ResultCode.BAD);
    }
}
Redis + ThreadPoolExecutor 处理大规模数据
最新推荐文章于 2024-01-27 11:20:25 发布