kafka (5) spring-kafka (2): detailed explanation and demo

I. A simple message send/receive demo

Parent project pom:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.example</groupId>
    <artifactId>kafka-demo</artifactId>
    <version>1.0-SNAPSHOT</version>
    <packaging>pom</packaging>
    <modules>
        <module>producer</module>
        <module>consumer-1</module>
        <module>consumer-2</module>
    </modules>

    <!-- springBoot -->
    <parent>
        <groupId>org.springframework.boot</groupId>
        <artifactId>spring-boot-starter-parent</artifactId>
        <version>2.1.4.RELEASE</version>
    </parent>

    <properties>
        <maven.compiler.source>8</maven.compiler.source>
        <maven.compiler.target>8</maven.compiler.target>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter</artifactId>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
        </dependency>

        <!--kafka-->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
<!--            <version>3.0.0</version>-->
        </dependency>

        <dependency>
            <groupId>org.springframework.kafka</groupId>
            <artifactId>spring-kafka</artifactId>
        </dependency>

        <!--lombok-->
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
        </dependency>

        <dependency>
            <groupId>com.alibaba</groupId>
            <artifactId>fastjson</artifactId>
            <version>1.2.78</version>
        </dependency>

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-test</artifactId>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <scope>test</scope>
        </dependency>
    </dependencies>

</project>
1. Producer

1.1 Configuration file
spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.value-serializer=org.apache.kafka.common.serialization.StringSerializer

user.topic = userTest
school.topic = schoolTest
1.2 DTOs
package com.example.dto;

import lombok.Builder;
import lombok.Data;

@Data
@Builder
public class SchoolDTO {
    private String schoolId;
    private String schoolName;
}
package com.example.dto;

import lombok.Builder;
import lombok.Data;

@Data
@Builder
public class UserDTO {
    private String userId;
    private String userName;
    private Integer age;
}
1.3 Service
package com.example.service.impl;

import com.alibaba.fastjson.JSON;
import com.example.dto.SchoolDTO;
import com.example.service.SchoolService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

@Service("schoolService")
@Slf4j
public class SchoolServiceImpl implements SchoolService {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Value("${school.topic}")
    private String schoolTopic;

    @Override
    public void sendSchoolMsg(SchoolDTO schoolDTO) {
        String msg = JSON.toJSONString(schoolDTO);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(schoolTopic, msg);
        kafkaTemplate.send(producerRecord);
        log.info("school消息发送成功");
    }
}
package com.example.service.impl;

import com.alibaba.fastjson.JSON;
import com.example.dto.UserDTO;
import com.example.service.UserService;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

@Service("userService")
@Slf4j
public class UserServiceImpl implements UserService {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Value("${user.topic}")
    private String userTopic;

    @Override
    public void sendUserMsg(UserDTO userDTO) {
        String msg = JSON.toJSONString(userDTO);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(userTopic, msg);
        kafkaTemplate.send(producerRecord);
        log.info("user消息发送成功");
    }
}
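Note that kafkaTemplate.send() is asynchronous: logging "发送成功" right after the call only means the record was handed to the producer, not that the broker has acknowledged it. Below is a minimal sketch of attaching a result callback to the ListenableFuture returned by send(); the helper class name is made up for illustration and is not part of the original demo.

package com.example.service.impl;

import com.alibaba.fastjson.JSON;
import com.example.dto.UserDTO;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.stereotype.Component;
import org.springframework.util.concurrent.ListenableFutureCallback;

// Sketch only: a variant of sendUserMsg that logs the real send outcome.
@Component
@Slf4j
public class UserMsgCallbackSender {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @Value("${user.topic}")
    private String userTopic;

    public void sendUserMsg(UserDTO userDTO) {
        String msg = JSON.toJSONString(userDTO);
        ProducerRecord<String, String> producerRecord = new ProducerRecord<>(userTopic, msg);
        kafkaTemplate.send(producerRecord).addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
            @Override
            public void onSuccess(SendResult<String, String> result) {
                // the broker has acknowledged the record; partition and offset are now known
                log.info("user消息发送成功, partition={}, offset={}",
                        result.getRecordMetadata().partition(),
                        result.getRecordMetadata().offset());
            }

            @Override
            public void onFailure(Throwable ex) {
                log.error("user消息发送失败", ex);
            }
        });
    }
}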
1.4 Main class
package com.example;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;

@SpringBootApplication
public class ProducerApplication {
    public static void main(String[] args) {
        SpringApplication.run(ProducerApplication.class, args);
    }
}
2. Consumer

2.1 Configuration file
spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit = true 


user.topic = userTest
user.group.id = user-group-1

school.topic = schoolTest
school.group.id = school-group-1

server.port = 2222
2.2 Listeners
package com.example.listen;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
@Slf4j
public class SchoolConsumer {

    @KafkaListener(topics = "${school.topic}", groupId = "${school.group.id}")
    public void consumer(ConsumerRecord<?, ?> record) {
        try {
            Object message = record.value();
            if (message != null) {
                String msg = String.valueOf(message);
                log.info("接收到:msg={},topic:{},partition={},offset={}", msg, record.topic(), record.partition(), record.offset());
            }
        } catch (Exception e) {
            log.error("topic:{},is consumed error:{}", record.topic(), e.getMessage());
        } finally {
            //ack.acknowledge();
        }
    }
}
package com.example.listen;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

@Component
@Slf4j
public class UserConsumer {

    @KafkaListener(topics = "${user.topic}", groupId = "${user.group.id}")
    public void consumer(ConsumerRecord<?, ?> record) {
        try {
            Object message = record.value();
            if (message != null) {
                String msg = String.valueOf(message);
                log.info("接收到:msg={},topic:{},partition={},offset={}",msg,record.topic(),record.partition(),record.offset());

            }
        } catch (Exception e) {
            log.error("topic:{},is consumed error:{}", record.topic(), e.getMessage());
        } finally {
            //ack.acknowledge();
        }
    }
}
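Both listeners keep ack.acknowledge() commented out. With the configuration above (enable-auto-commit=true) the Kafka client commits offsets automatically, so no Acknowledgment parameter is needed. To switch to manual commits, one sketch would be to set spring.kafka.consumer.enable-auto-commit=false and spring.kafka.listener.ack-mode=manual_immediate in the properties file and replace the listener with something like the class below (the class name is made up for illustration):

package com.example.listen;

import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.stereotype.Component;

// Sketch only: assumes enable-auto-commit=false and ack-mode=manual_immediate.
@Component
@Slf4j
public class UserManualAckConsumer {

    @KafkaListener(topics = "${user.topic}", groupId = "${user.group.id}")
    public void consumer(ConsumerRecord<?, ?> record, Acknowledgment ack) {
        log.info("接收到:msg={},topic:{},partition={},offset={}",
                record.value(), record.topic(), record.partition(), record.offset());
        // commit this record's offset only after it has been processed
        ack.acknowledge();
    }
}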

If no group.id is specified, startup fails, which confirms that a Kafka consumer must belong to a consumer group. For example, writing:

@KafkaListener(topics = "${user.topic}")
public void consumer(ConsumerRecord<?, ?> record)

fails at startup with:

Caused by: java.lang.IllegalStateException: No group.id found in consumer config, container properties, or @KafkaListener annotation; a group.id is required when group management is used.
    at org.springframework.util.Assert.state(Assert.java:73) ~[spring-core-5.1.6.RELEASE.jar:5.1.6.RELEASE]

(Instead of setting groupId on every @KafkaListener, a default can also be supplied via the spring.kafka.consumer.group-id property.)

2.3 Main class
package com.example;

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.annotation.EnableKafka;

@SpringBootApplication
//@EnableKafka
public class Consumer1Application {
    public static void main(String[] args) {
        SpringApplication.run(Consumer1Application.class, args);
    }
}
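@EnableKafka is left commented out here because Spring Boot's Kafka auto-configuration already enables @KafkaListener processing when spring-kafka is on the classpath; declaring it explicitly is harmless but not required in this setup.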
3. Testing

Start the consumer application first.

On the producer side, send messages from a unit test:

package com.demo.kafka;

import com.example.ProducerApplication;
import com.example.dto.SchoolDTO;
import com.example.dto.UserDTO;
import com.example.service.SchoolService;
import com.example.service.UserService;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;

@SpringBootTest(classes = {ProducerApplication.class}, webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@RunWith(SpringRunner.class)
public class Test {

    @Autowired
    private SchoolService schoolService;

    @Autowired
    private UserService userService;

    @org.junit.Test
    public void sendUserMsg(){
        UserDTO userDTO = UserDTO.builder()
                .userId("id-1")
                .age(18)
                .userName("zs")
                .build();
        userService.sendUserMsg(userDTO);
    }

    @org.junit.Test
    public void sendSchoolMsg(){
        SchoolDTO schoolDTO = SchoolDTO.builder()
                .schoolId("schoolId-1")
                .schoolName("mid school")
                .build();
        schoolService.sendSchoolMsg(schoolDTO);
    }

}

Run the tests and watch the consumer output. Then change the payload values and run them again; both listeners keep receiving messages normally:

2024-06-22 17:09:06.383  INFO 76104 --- [           main] o.s.b.w.embedded.tomcat.TomcatWebServer  : Tomcat started on port(s): 2222 (http) with context path ''
2024-06-22 17:09:06.390  INFO 76104 --- [ntainer#1-0-C-1] org.apache.kafka.clients.Metadata        : Cluster ID: ZdpIAHTjS9GhJlvPP8n0Rw
2024-06-22 17:09:06.392  INFO 76104 --- [ntainer#1-0-C-1] o.a.k.c.c.internals.AbstractCoordinator  : [Consumer clientId=consumer-4, groupId=user-group-1] Discovered group coordinator localhost:9092 (id: 2147483647 rack: null)
2024-06-22 17:09:06.393  INFO 76104 --- [ntainer#1-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator  : [Consumer clientId=consumer-4, groupId=user-group-1] Revoking previously assigned partitions []
2024-06-22 17:09:06.394  INFO 76104 --- [ntainer#1-0-C-1] o.s.k.l.KafkaMessageListenerContainer    : partitions revoked: []
2024-06-22 17:09:06.394  INFO 76104 --- [ntainer#1-0-C-1] o.a.k.c.c.internals.AbstractCoordinator  : [Consumer clientId=consumer-4, groupId=user-group-1] (Re-)joining group
2024-06-22 17:09:06.401  INFO 76104 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.AbstractCoordinator  : [Consumer clientId=consumer-2, groupId=school-group-1] Successfully joined group with generation 11
2024-06-22 17:09:06.403  INFO 76104 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator  : [Consumer clientId=consumer-2, groupId=school-group-1] Setting newly assigned partitions [schoolTest-0]
2024-06-22 17:09:06.403  INFO 76104 --- [           main] com.example.Consumer1Application         : Started Consumer1Application in 8.606 seconds (JVM running for 9.621)
2024-06-22 17:09:06.413  INFO 76104 --- [ntainer#0-0-C-1] o.s.k.l.KafkaMessageListenerContainer    : partitions assigned: [schoolTest-0]
2024-06-22 17:09:06.491  INFO 76104 --- [ntainer#1-0-C-1] o.a.k.c.c.internals.AbstractCoordinator  : [Consumer clientId=consumer-4, groupId=user-group-1] Successfully joined group with generation 3
2024-06-22 17:09:06.493  INFO 76104 --- [ntainer#1-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator  : [Consumer clientId=consumer-4, groupId=user-group-1] Setting newly assigned partitions [userTest-0]
2024-06-22 17:09:06.611  INFO 76104 --- [ntainer#1-0-C-1] o.s.k.l.KafkaMessageListenerContainer    : partitions assigned: [userTest-0]
2024-06-22 17:16:29.775  INFO 76104 --- [ntainer#1-0-C-1] com.example.listen.UserConsumer          : 接收到:msg={"age":18,"userId":"id-1","userName":"zs"},topic:userTest,partition=0,offset=4
2024-06-22 17:16:48.157  INFO 76104 --- [ntainer#0-0-C-1] com.example.listen.SchoolConsumer        : 接收到:msg={"schoolId":"schoolId-1","schoolName":"mid school"},topic:schoolTest,partition=0,offset=1
2024-06-22 17:17:39.458  INFO 76104 --- [ntainer#1-0-C-1] com.example.listen.UserConsumer          : 接收到:msg={"age":20,"userId":"id-2","userName":"ls"},topic:userTest,partition=0,offset=5
2024-06-22 17:17:59.474  INFO 76104 --- [ntainer#0-0-C-1] com.example.listen.SchoolConsumer        : 接收到:msg={"schoolId":"schoolId-2","schoolName":"primary school"},topic:schoolTest,partition=0,offset=2
4. Multiple consumers
4.1 Same groupId

Copy the consumer-1 code into consumer-2, change server.port to a different value (3333), and start it:

spring.kafka.bootstrap-servers=localhost:9092
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.enable-auto-commit = true 


user.topic = userTest
user.group.id = user-group-1

school.topic = schoolTest
school.group.id = school-group-1

server.port = 3333
2024-06-22 17:23:59.524  INFO 78096 --- [           main] o.s.b.w.embedded.tomcat.TomcatWebServer  : Tomcat started on port(s): 3333 (http) with context path ''
2024-06-22 17:23:59.531  INFO 78096 --- [           main] example.Consumer2Application             : Started Consumer2Application in 7.534 seconds (JVM running for 8.506)
2024-06-22 17:24:00.021  INFO 78096 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.AbstractCoordinator  : [Consumer clientId=consumer-2, groupId=school-group-1] Successfully joined group with generation 12
2024-06-22 17:24:00.024  INFO 78096 --- [ntainer#0-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator  : [Consumer clientId=consumer-2, groupId=school-group-1] Setting newly assigned partitions []
2024-06-22 17:24:00.025  INFO 78096 --- [ntainer#0-0-C-1] o.s.k.l.KafkaMessageListenerContainer    : partitions assigned: []
2024-06-22 17:24:00.028  INFO 78096 --- [ntainer#1-0-C-1] o.a.k.c.c.internals.AbstractCoordinator  : [Consumer clientId=consumer-4, groupId=user-group-1] Successfully joined group with generation 4
2024-06-22 17:24:00.028  INFO 78096 --- [ntainer#1-0-C-1] o.a.k.c.c.internals.ConsumerCoordinator  : [Consumer clientId=consumer-4, groupId=user-group-1] Setting newly assigned partitions []
2024-06-22 17:24:00.029  INFO 78096 --- [ntainer#1-0-C-1] o.s.k.l.KafkaMessageListenerContainer    : partitions assigned: []

Run the producer tests once more and watch both consumers: consumer-1 receives the messages, while consumer-2 does not.

Running them again gives the same result, and the school topic behaves the same way.

This confirms that a given partition of a topic is consumed by only one consumer within the same consumer group.

4.2 Different groups

Now change the group ids in consumer-2 and restart:

user.group.id = user-group-2
school.group.id = school-group-2

After starting, it immediately consumes all previously sent messages (because this is a new consumer group and auto-offset-reset is earliest):

Send new messages again: consumer-1 and consumer-2 both receive them.

This confirms that the same topic can be consumed by multiple different consumer groups.

II. Producer partitioning

First, look at the data files generated by the steps above:

So far there is only one partition, so each of the two topics has just a single data directory. Now give the userTest topic multiple partitions and walk through the four partitioning strategies (sketches follow the list):

1. Explicitly specified partition
2. Round-robin
3. Key-hash partitioning
4. Custom partitioning strategy (i.e., implementing a custom Partitioner)
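As a rough sketch of strategies 1 and 4 (strategies 2 and 3 are roughly what the default partitioner of this client version does for keyless and keyed records, respectively): the partition count of userTest can be increased with the Kafka CLI, e.g. kafka-topics.sh --alter --topic userTest --partitions 3 (plus --bootstrap-server localhost:9092 or --zookeeper, depending on the broker version). For strategy 1, the partition is simply passed when constructing the record, e.g. new ProducerRecord<>(userTopic, 1, null, msg). For strategy 4, a minimal custom Partitioner could look like the class below; the class and package names are made up for illustration, and it would be registered via spring.kafka.producer.properties.partitioner.class=com.example.partition.UserIdPartitioner:

package com.example.partition;

import java.util.Arrays;
import java.util.Map;
import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

// Hypothetical example, not part of the original demo: routes records with the
// same key to the same partition, and keyless records to partition 0.
public class UserIdPartitioner implements Partitioner {

    @Override
    public int partition(String topic, Object key, byte[] keyBytes,
                         Object value, byte[] valueBytes, Cluster cluster) {
        int numPartitions = cluster.partitionCountForTopic(topic);
        if (keyBytes == null) {
            return 0; // no key: always use the first partition in this sketch
        }
        // deterministic, non-negative mapping from the key bytes to a partition
        return (Arrays.hashCode(keyBytes) & Integer.MAX_VALUE) % numPartitions;
    }

    @Override
    public void close() {
    }

    @Override
    public void configure(Map<String, ?> configs) {
    }
}

Since the demo sends records without a key, this partitioner would route everything to partition 0; sending with a key (for example the userId) is what makes the hash-based routing meaningful.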

III. Consumer partition assignment strategies

