How to verify the content of sent Kafka messages in automated tests

The approach: before calling the API under test, start a Kafka consumer in a background thread positioned at the end of the target topic; once the API call produces its message, the consumer reads it and the test asserts on the JSON fields.

  • Kafka utility class:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import java.util.UUID;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

import lombok.extern.slf4j.Slf4j;

@Slf4j
public class KafkaUtils {

    public static class KafkaConsumerUtil {

        public KafkaConsumer<String, String> createConsumer(String topic) {
            Properties props = new Properties();
            props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "xxxx.com:39094");
            props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group-" + UUID.randomUUID());
            props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
            props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
            props.put("security.protocol", "SASL_PLAINTEXT"); // use SASL_PLAINTEXT or SASL_SSL, depending on the cluster
            props.put("sasl.mechanism", "PLAIN");
            props.put("sasl.jaas.config",
                    "org.apache.kafka.common.security.plain.PlainLoginModule required "
                            + "username=\"xxx\" password=\"xxxx\";"); // replace with the real credentials

            log.info("Consumer props: {}", props);
            KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
            consumer.subscribe(Collections.singletonList(topic));
            consumer.poll(Duration.ZERO);              // trigger partition assignment
            consumer.seekToEnd(consumer.assignment()); // position at the end so only new messages are read
            return consumer;
        }
    }
}
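Note that poll(Duration.ZERO) can return before the group rebalance has completed, in which case consumer.assignment() is still empty and seekToEnd does nothing. A minimal sketch of a more defensive wait (the class name, method name, and timeout are assumptions, not part of the utility above):

import java.time.Duration;

import org.apache.kafka.clients.consumer.KafkaConsumer;

public class KafkaAssignmentWaiter {

    // Hypothetical helper: keep polling until partitions are assigned (or the timeout
    // elapses), then seek to the end of every assigned partition.
    public static void seekToEndWhenAssigned(KafkaConsumer<String, String> consumer, long timeoutMs) {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (consumer.assignment().isEmpty() && System.currentTimeMillis() < deadline) {
            consumer.poll(Duration.ofMillis(100));
        }
        consumer.seekToEnd(consumer.assignment());
    }
}

In createConsumer this would take the place of the poll(Duration.ZERO) / seekToEnd pair, e.g. KafkaAssignmentWaiter.seekToEndWhenAssigned(consumer, 10_000).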






  • Test script:


@Slf4j
public class MOrderUnfinishedStampUploadPostScenario extends BaseNAFCase {

    private MOrderStampTemplateUploadPost api = new MOrderStampTemplateUploadPost();
    private StampHandCreatePost api2 = new StampHandCreatePost();
    File excelFile;
    String excelFilePath;
    StampHandCreatePostRequestBody excelBody = new StampHandCreatePostRequestBody();

    @Autowired
    MadeOrderMapper mapper;
    private KafkaUtils.KafkaConsumerUtil kafkaConsumerUtil = new KafkaUtils.KafkaConsumerUtil(); // instantiated directly






    @BeforeMethod
    @Override
    public void initData() {
        // Initialize test data
        // Project root directory
        String rootDir = System.getProperty("user.dir");
        // Build the relative path with Paths/Path
        Path relativePath = Paths.get(rootDir)
                .resolve("src/test/resources/case/xx/ExcelData/xx订单.xlsx");

        excelFilePath = relativePath.toString();
        excelFile = new File(excelFilePath);
        // Initialize the StampHandCreatePostRequestBody request parameters
        excelBody = new StampHandCreatePostRequestBody();
    }
    // Clean up the inserted test data after each test
    @AfterMethod
    @Override
    public void tearDown() {
        int i = mapper.deleteTestData();
        log.info("Rows deleted from database: {}", i);
        assertEquals(i, 4);
    }


    
    @Test(enabled = true, priority = 1)
    public void UploadPost() throws IOException, InterruptedException {
        // Start a Kafka consumer on a separate thread to consume the pushed messages
        Thread thread = new Thread(() -> {
            String topic = "xx";
            KafkaConsumer<String, String> consumer = kafkaConsumerUtil.createConsumer(topic);
            boolean found = false;
            long timeoutMs = 10000000;
            long startTime = System.currentTimeMillis();
            ObjectMapper mapper = new ObjectMapper();

            try {
                while (System.currentTimeMillis() - startTime < timeoutMs && !found) {
                    ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(500));
                    log.info("ConsumerRecords: {}", records);
                    for (ConsumerRecord<String, String> record : records) {
                        log.info("Consumed message: Key = {}, Value = {}", record.key(), record.value());
                        JsonNode actualNode = mapper.readTree(record.value());
                        String materialNo = actualNode.get("materialNo").asText();
                        log.info("materialNo: {}", materialNo);
                        // Verify the individual fields; assertFields reports whether this record matched
                        found = found || assertFields(materialNo, actualNode);
                    }
                }
            } catch (JsonProcessingException e) {
                // JsonMappingException is a subclass of JsonProcessingException, so one catch suffices
                throw new RuntimeException(e);
            } finally {
                consumer.close();
            }
            log.info("found: {}", found);
            Assert.assertTrue(found, "Expected Kafka message not found");
        });
        thread.start();
        Thread.sleep(1000); // give the consumer time to subscribe before the upload is triggered

        // Upload the xx Excel file
        ReadContext f;
        if (excelFile != null && excelFile.exists()) {
            // The file exists; build and send the HTTP POST request
            api.addParam("materialType", 0);
            f = api.addMultiPart("file", excelFile).asJsonPath();
            HashMap<String, Object> map = f.json();
            // Assertion check on the response code
            log.info("First request response code: {}", map.get("code"));
            assertEquals(map.get("code"), 200);

        } else {
            // The file does not exist; fail fast
            throw new IllegalArgumentException("File does not exist: " + excelFilePath);
        }

        // Take parameters from the first response and use them as input for the second request
        String json = f.jsonString();
        ObjectMapper objectMapper = new ObjectMapper();

        // Parse the JSON string into a JsonNode
        JsonNode rootNode = objectMapper.readTree(json);

        // Get the "data" field from the root node
        JsonNode dataNode = rootNode.get("data");

        // Get the "records" array from the "data" field
        ArrayNode recordsArray = (ArrayNode) dataNode.get("records");

        // Iterate over the records array and map each element into the second request's body
        for (JsonNode recordNode : recordsArray) {
            StampHandCreatePostRequestBodyOrderDetailParams orderParams = objectMapper
                    .readValue(recordNode.traverse(), StampHandCreatePostRequestBodyOrderDetailParams.class);
            excelBody.addOrderDetailParamsItem(orderParams);
        }
        excelBody.setMaterialType(1);

        // Second request
        ReadContext json2 = api2.setBody(excelBody).asJsonPath();
        // Assertion check on the response code
        HashMap<String, Object> map = json2.json();
        log.info("Second request response code: {}", map.get("code"));
        assertEquals(map.get("code"), 200);
    }
    // Field-level checks; returns true when the record matches one of the expected orders
    public boolean assertFields(String materialNo, JsonNode actualNode) {
        if ("xxxx".equals(materialNo)) {
            assertEquals(actualNode.get("plantNo").asText(), "xxxx");
            assertEquals(actualNode.get("orderDate").asText(), "2023-05-02");
            assertEquals(actualNode.get("orderType").asText(), "xxxx");
            assertEquals(actualNode.get("quantity").asInt(), 100);
            assertEquals(actualNode.get("orderSerialNo").asText(), "xxxx");
            assertEquals(actualNode.get("orderReason").asText(), "xxxx");
            assertEquals(actualNode.get("productLineCode").asText(), "xxxx");
            assertEquals(actualNode.get("productTime").asText(), "xxxx");
            log.info("FCA0100021 found");
            return true;
        }
        // Second expected order
        if ("xxxx".equals(materialNo)) {
            assertEquals(actualNode.get("plantNo").asText(), "xxxx");
            assertEquals(actualNode.get("orderDate").asText(), "2023-05-02");
            assertEquals(actualNode.get("orderType").asText(), "xxxx");
            assertEquals(actualNode.get("quantity").asInt(), 100);
            assertEquals(actualNode.get("orderSerialNo").asText(), "xxxx");
            assertEquals(actualNode.get("orderReason").asText(), "xxxx");
            assertEquals(actualNode.get("productLineCode").asText(), "xxxx");
            assertEquals(actualNode.get("productTime").asText(), "xxxx");
            log.info("FCA0100021 found");
            return true;
        }
        return false;
    }
}
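A caveat with the threaded approach above: an assertion that fails inside the spawned thread only throws on that thread, so TestNG never sees it, and the test method returns without waiting for the consumer. A sketch of an alternative that runs the polling loop as a Callable and collects the result on the test thread via a Future (the class and method names, the contains() shortcut, and the timeout values are assumptions for illustration):

import java.time.Duration;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.testng.Assert;

public class KafkaAssertionSketch {

    // Hypothetical helper: poll until a record containing the expected materialNo
    // arrives or the timeout elapses; the field-level assertions would replace the
    // contains() check in a real test.
    static boolean pollForExpectedMessage(KafkaConsumer<String, String> consumer,
                                          String expectedMaterialNo, long timeoutMs) {
        long deadline = System.currentTimeMillis() + timeoutMs;
        try {
            while (System.currentTimeMillis() < deadline) {
                for (ConsumerRecord<String, String> record : consumer.poll(Duration.ofMillis(500))) {
                    if (record.value().contains(expectedMaterialNo)) {
                        return true;
                    }
                }
            }
            return false;
        } finally {
            consumer.close();
        }
    }

    static void verify(KafkaConsumer<String, String> consumer) throws Exception {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Submit the polling loop so its result (and any exception it throws)
        // is surfaced to the test thread through Future.get().
        Future<Boolean> matched = executor.submit(
                () -> pollForExpectedMessage(consumer, "xxxx", 30_000));
        try {
            // ... trigger the upload and create requests here ...
            Assert.assertTrue(matched.get(60, TimeUnit.SECONDS), "Expected Kafka message not found");
        } finally {
            executor.shutdownNow();
        }
    }
}

With this shape, a missing or malformed message fails the test instead of only being logged, because Future.get rethrows anything the worker threw and the assertion runs on the TestNG thread.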
