Golang kafka build by docker-compose

本文介绍了如何利用Docker Compose在MacOS上构建Golang Kafka项目。首先,通过启动服务并忽略日志,然后分别使用特定命令初始化生产者和消费者项目。最后,提醒读者这只是基础实践,最佳配置需结合实际项目需求,并推荐参考相关资料进行深入学习。

摘要生成于 C知道 ,由 DeepSeek-R1 满血版支持, 前往体验 >

  • 使用 docker-compose 拉起 kafka,配置文件 docker-compose.yaml 如下:
# Single-broker Kafka stack for local development: zookeeper + kafka + kafka-manager UI.
version: '3.2'
services:
  zookeeper:
    image: wurstmeister/zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"   # zookeeper client port exposed to the host
    restart: always
  kafka:
    image: wurstmeister/kafka
    container_name: kafka
    links: 
      - zookeeper
    ports:
      - "9092:9092"   # broker port exposed to the host (matches the Go clients below)
    environment:
      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
      # Advertised as 127.0.0.1 so clients running on the HOST can connect.
      # NOTE(review): other containers presumably cannot reach the broker via
      # this advertised address — confirm if container-to-container access is needed.
      - KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092
      - KAFKA_LISTENERS=PLAINTEXT://:9092
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    restart: always
    depends_on: 
      - zookeeper
  kafka-manager:
    image: sheepkiller/kafka-manager:latest
    ports:
      - "9000:9000"   # web UI: http://localhost:9000
    links:
      - zookeeper
      - kafka
    environment:
      ZK_HOSTS: zookeeper:2181
      APPLICATION_SECRET: letmein
      KM_ARGS: -Djava.net.preferIPv4Stack=true
    depends_on: 
      - kafka
      - zookeeper
  • 启动kafka服务:在docker-compose.yaml文件所在目录执行:docker-compose up (不想看日志的: docker-compose up -d)

  • 使用 go mod init my_producer 初始化一个项目, main.go

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"time"

	"github.com/Shopify/sarama"
)

// main runs the producer send loop forever; the broker connection
// itself is established in init before main starts.
func main() {
	startProduce()
}

// Package-level producer state; producer is assigned in init.
var (
	producer sarama.SyncProducer          // shared synchronous producer
	brokers  = []string{"127.0.0.1:9092"} // matches KAFKA_ADVERTISED_LISTENERS in docker-compose
	topic    = "test_topic_1"             // demo topic used by produceMsg
)

// init creates the package-level SyncProducer before main runs.
//
// Return.Successes must be true for a SyncProducer (sarama requires it);
// RequiredAcks = WaitForLocal means the send returns once the leader broker
// has persisted the message. Panics when the brokers are unreachable.
func init() {
	config := sarama.NewConfig()
	config.Producer.RequiredAcks = sarama.WaitForLocal // wait for the local broker only
	config.Producer.Retry.Max = 5                      // retry transient send failures
	config.Producer.Return.Successes = true            // mandatory for SyncProducer

	// The original shadowed brokers with `brokers := brokers` — a no-op; removed.
	var err error
	producer, err = sarama.NewSyncProducer(brokers, config)
	if err != nil {
		fmt.Printf("init producer failed -> %v \n", err)
		panic(err)
	}
	// No else needed: panic above terminates the failure path.
	fmt.Println("producer init success")
}

// produceMsg publishes msg to the configured topic via the shared
// SyncProducer and logs either the send error or the resulting
// partition/offset.
func produceMsg(msg string) {
	message := &sarama.ProducerMessage{
		Topic: topic,
		Value: sarama.StringEncoder(msg),
	}
	fmt.Printf("SendMsg -> %v\n", dumpString(message))

	partition, offset, err := producer.SendMessage(message)
	if err != nil {
		fmt.Printf("send msg error:%s \n", err)
		return
	}
	fmt.Printf("msg send success, message is stored in topic(%s)/partition(%d)/offset(%d)\n", topic, partition, offset)
}

// startProduce emits one timestamped test message every 2 seconds, forever.
//
// Fixes two defects in the original: the 5-second Sleep inside the loop made
// the 2-second ticker meaningless (effective period ~5s), and the single-case
// select added nothing. time.NewTicker with a deferred Stop also avoids the
// ticker leak of time.Tick (pre-Go 1.23).
func startProduce() {
	ticker := time.NewTicker(2 * time.Second)
	defer ticker.Stop()
	for range ticker.C {
		t := time.Now().Unix() * 1000 // seconds truncated to millisecond units, as in the original
		msg := fmt.Sprintf("{\"timestamp\":%d}--cass0", t)
		produceMsg(msg)
	}
}

//解析为json字符串
func dumpString(v interface{}) (str string) {

	bs, err := json.Marshal(v)
	b := bytes.Buffer{}
	if err != nil {
		b.WriteString("{err:\"json format error.")
		b.WriteString(err.Error())
		b.WriteString("\"}")
	} else {
		b.Write(bs)
	}
	str = b.String()
	return str
}
  • 运行生产者

  • 使用 go mod init my_consumer 初始化一个项目, main.go

package main

import (
	"fmt"
	"time"

	"github.com/Shopify/sarama"
	cluster "github.com/bsm/sarama-cluster"
)

// Package-level consumer state; kafkaConsumer is assigned in init.
var (
	kafkaConsumer *cluster.Consumer                // group consumer shared by main's select loop
	kafkaBrokers  = []string{"127.0.0.1:9092"}     // matches KAFKA_ADVERTISED_LISTENERS in docker-compose
	kafkaTopic    = "test_topic_1"                 // same topic the producer writes to
	groupId       = "csdn_test_1"                  // consumer-group id used for offset tracking
)

// init builds the package-level consumer-group client before main runs.
//
// Fixes in this revision: Group.Return.Notifications was assigned twice in
// the original (kept once); the magic value -2 for Offsets.Initial is
// replaced by its named constant sarama.OffsetOldest (same value: a new
// group starts from the earliest retained message); the post-error nil
// check on kafkaConsumer was dead code once err is nil and is removed.
// Panics when the brokers are unreachable.
func init() {
	config := cluster.NewConfig()
	config.Consumer.Return.Errors = true     // surface consumer errors on Errors()
	config.Group.Return.Notifications = true // surface rebalance events on Notifications()
	config.Consumer.Group.Rebalance.Strategy = sarama.BalanceStrategyRange
	config.Consumer.Offsets.Initial = sarama.OffsetOldest // was the raw value -2
	config.Consumer.Offsets.CommitInterval = 1 * time.Second

	var err error
	kafkaConsumer, err = cluster.NewConsumer(kafkaBrokers, groupId, []string{kafkaTopic}, config)
	if err != nil {
		panic(err.Error())
	}
	fmt.Printf("kafka init success, consumer -> %v, topic -> %v, ", kafkaConsumer, kafkaTopic)
}

// main multiplexes the three consumer channels forever: data messages
// (marking each offset after handling), consumer errors, and group
// rebalance notifications.
func main() {
	for {
		select {
		case err, open := <-kafkaConsumer.Errors():
			if !open {
				continue
			}
			fmt.Printf("consumer error: %v", err)
		case ntf, open := <-kafkaConsumer.Notifications():
			if !open {
				continue
			}
			fmt.Printf("consumer notification: %v", ntf)
		case msg, open := <-kafkaConsumer.Messages():
			if !open {
				fmt.Printf("kafka 监听服务失败")
				continue
			}
			fmt.Printf("kafka msg: %s \n", msg.Value)
			kafkaConsumer.MarkOffset(msg, "")
		}
	}
}
  • 运行消费者
最后
  1. 这是一个基础的使用,需要得到最佳实践还需要根据自己的项目来配置,去kafka看看
  2. 借鉴1 借鉴2
评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值