代理执行document.save

本文探讨了使用代理执行document.save时,如何确保不会触发原documentform中的querysave或postsave事件,提供了实现方法和注意事项。

摘要生成于 C知道 ,由 DeepSeek-R1 满血版支持, 前往体验 >

代理执行 document.save 时，是否不会运行原 document form 中的程序（如 QuerySave 或 PostSave 事件里的代码）？

 

package com.xymzsfxy.backend.service;

import com.xymzsfxy.backend.entity.PriceHistory;
import com.xymzsfxy.backend.entity.Product;
import com.xymzsfxy.backend.repository.PriceHistoryRepository;
import com.xymzsfxy.backend.repository.ProductRepository;
import lombok.extern.slf4j.Slf4j;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.math.BigDecimal;
import java.time.LocalDateTime;
import java.util.Map;

/**
 * Crawls product prices from a set of configured external sources and
 * persists each observed price as a {@link PriceHistory} record, also
 * updating the product's latest price.
 *
 * <p>Configuration (from application properties, injected as SpEL maps):
 * <ul>
 *   <li>{@code crawler.sources}   — source name → URL template containing one
 *       {@code %s} placeholder for the product's external id</li>
 *   <li>{@code crawler.selectors} — source name → CSS selector locating the
 *       price element on that source's page</li>
 * </ul>
 */
@Slf4j
@Service
public class PriceCrawlerService {

    private final ProductRepository productRepository;
    private final PriceHistoryRepository priceHistoryRepository;

    // Source name -> URL template, e.g. "https://example.com/item/%s".
    @Value("#{${crawler.sources}}")
    private Map<String, String> crawlerSources;

    // Source name -> CSS selector for the element whose text is the price.
    @Value("#{${crawler.selectors}}")
    private Map<String, String> crawlerSelectors;

    @Autowired
    public PriceCrawlerService(ProductRepository productRepository,
                               PriceHistoryRepository priceHistoryRepository) {
        this.productRepository = productRepository;
        this.priceHistoryRepository = priceHistoryRepository;
    }

    /**
     * Asynchronously crawls the price of one product from every configured source.
     * Runs on the {@code crawlerTaskExecutor} thread pool; per-source failures are
     * logged and do not abort the remaining sources.
     *
     * @param productId id of the product to crawl
     * @throws IllegalArgumentException if no product exists with the given id
     *         (thrown on the async executor thread)
     */
    @Async("crawlerTaskExecutor")
    public void crawlPrices(Long productId) {
        Product product = productRepository.findById(productId)
                .orElseThrow(() -> new IllegalArgumentException("无效商品ID: " + productId));

        crawlerSources.forEach((sourceName, urlTemplate) -> {
            try {
                String targetUrl = String.format(urlTemplate, product.getExternalId());
                Document doc = Jsoup.connect(targetUrl)
                        .timeout(10000)
                        .userAgent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36")
                        .get();

                String selector = crawlerSelectors.get(sourceName);
                if (selector == null) {
                    // Guard: a source without a matching selector entry would have
                    // passed null into selectFirst().
                    log.warn("[{}] 未配置价格选择器", sourceName);
                    return;
                }
                Element priceElement = doc.selectFirst(selector);
                if (priceElement == null) {
                    // FIX: original called .text() unconditionally and threw NPE
                    // whenever the selector matched nothing on the page.
                    log.warn("[{}] 页面中未找到价格元素: {}", sourceName, selector);
                    return;
                }
                BigDecimal price = parsePrice(priceElement.text());
                savePriceData(product, sourceName, price);
            } catch (IOException e) {
                // FIX: original message literal contained a raw line break inside
                // the string ("网络请求失败: <newline>{}"), which does not compile.
                log.error("[{}] 网络请求失败: {}", sourceName, e.getMessage());
            } catch (Exception e) {
                log.error("[{}] 数据处理异常: {}", sourceName, e.getMessage());
            }
        });
    }

    /**
     * Extracts a numeric price from free-form text: strips everything except
     * digits, dots and commas, then removes thousands-separator commas.
     * E.g. "¥1,299.00" -> 1299.00.
     */
    private BigDecimal parsePrice(String priceText) {
        String numericPrice = priceText.replaceAll("[^\\d.,]", "");
        return new BigDecimal(numericPrice.replace(",", ""));
    }

    /**
     * Persists one price observation and updates the product's latest price.
     *
     * NOTE(review): @Transactional is ineffective as written — crawlPrices()
     * invokes this method through {@code this}, bypassing the Spring proxy
     * (self-invocation). If atomicity of the two saves is required, move this
     * method to a separate bean or use TransactionTemplate. Left in place to
     * keep the external structure unchanged.
     */
    @Transactional
    protected void savePriceData(Product product, String source, BigDecimal price) {
        // 保存价格记录
        PriceHistory record = new PriceHistory();
        record.setProductId(product.getId());
        record.setSource(source);
        record.setPrice(price);
        record.setCrawlTime(LocalDateTime.now());
        priceHistoryRepository.save(record);

        // 更新商品最新价格
        product.setLatestPrice(price);
        product.setUpdatedTime(LocalDateTime.now());
        productRepository.save(product);
    }
}
最新发布
03-09
一、爬虫模块实现(价格自动更新) #### 1. 爬虫服务核心类 @Service public class PriceCrawlerService { @Autowired private ProductRepository productRepository; @Autowired private PriceHistoryRepository priceHistoryRepository; // 支持多数据源的爬取配置 @Value("#{${crawler.sources}}") private Map<String, String> crawlerSources; @Async("crawlerTaskExecutor") public void crawlPrices(Long productId) { Product product = productRepository.findById(productId).orElseThrow(); crawlerSources.forEach((sourceName, urlTemplate) -> { try { String targetUrl = String.format(urlTemplate, product.getExternalId()); Document doc = Jsoup.connect(targetUrl) .timeout(10000) .userAgent("Mozilla/5.0") .get(); Elements priceElements = doc.select(crawlerConfig.getSelector(sourceName)); BigDecimal price = parsePrice(priceElements.first().text()); savePriceData(product, sourceName, price); } catch (Exception e) { log.error("爬取{}数据失败: {}", sourceName, e.getMessage()); } }); } private void savePriceData(Product product, String source, BigDecimal price) { PriceHistory newPrice = new PriceHistory(); newPrice.setProductId(product.getId()); newPrice.setSource(source); newPrice.setPrice(price); newPrice.setCrawlTime(LocalDateTime.now()); priceHistoryRepository.save(newPrice); } } #### 2. 定时任务配置 @Configuration @EnableScheduling public class CrawlerScheduleConfig { @Autowired private PriceCrawlerService crawlerService; // 每天凌晨执行全量爬取 @Scheduled(cron = "0 0 2 * * ?") public void fullCrawl() { productRepository.findAll().forEach(product -> crawlerService.crawlPrices(product.getId())); } // 每小时执行增量爬取 @Scheduled(cron = "0 0 */1 * * ?") public void incrementalCrawl() { productRepository.findUpdatedRecently(1).forEach(product -> crawlerService.crawlPrices(product.getId())); } } #### 3. 
异步任务配置 @Configuration @EnableAsync public class AsyncConfig { @Bean("crawlerTaskExecutor") public Executor taskExecutor() { ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); executor.setCorePoolSize(5); executor.setMaxPoolSize(10); executor.setQueueCapacity(100); executor.setThreadNamePrefix("CrawlerThread-"); executor.initialize(); return executor; } }给出详细的代码和步骤
03-09
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值