Kafka SSL and SASL_SSL configuration
SSL references:
https://www.cnblogs.com/enhance/p/11233164.html
http://kafka.apache.org/documentation/#security_ssl
http://www.javacoder.cn/?p=867
https://www.cnblogs.com/lt-blogs/p/7154345.html
1. Kafka SSL configuration
# Two machines, kafka_2.11-2.2.0.tgz, CentOS 7.7
Kafka server: kafka61.aaa.cn
Kafka client: kafka61.aaa.cn
CA server: kafka63.ccc.cn
Server side:
# Generate the server keystore (private key and certificate)
keytool -keystore server.keystore.jks -alias kafka61.aaa.cn -validity 365 -genkey -keyalg RSA -storetype pkcs12 -storepass 123456 -keypass 123456 -dname "C=cn,ST=shanghai,L=shanghai,O=sx,OU=zh,CN=kafka61.aaa.cn"
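Before uploading, the keystore contents can be checked quickly (an optional sanity check; it assumes the keytool command above was run in the current directory):
# Optional: confirm the alias, subject and validity period
keytool -list -v -keystore server.keystore.jks -storepass 123456 | grep -E "Alias name|Owner|Valid"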
# Upload to the CA host:
scp *.jks kafka63.ccc.cn:/root/ssl/
Client side:
# Generate the client keystore (private key and certificate)
keytool -keystore client.keystore.jks -alias kafka61.aaa.cn -validity 365 -genkey -keyalg RSA -storetype pkcs12 -storepass 123456 -keypass 123456 -dname "C=cn,ST=shanghai,L=shanghai,O=sx,OU=zh,CN=kafka61.aaa.cn"
# Upload to the CA host:
scp *.jks kafka63.ccc.cn:/root/ssl/
CA side:
# Create the CA certificate
mkdir ssl;
cd ssl;
echo 01 > serial.txt
touch index.txt
openssl req -new -x509 -keyout ca.key -out ca.crt -days 365 -passout pass:123456 -subj "/C=cn/ST=shanghai/L=shanghai/O=sx/OU=zh/CN=kafka63.ccc.cn/emailAddress=13438230@qq.com"
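Optionally, confirm the CA certificate's subject and validity before signing anything with it (a quick check only; none of the later steps depend on it):
# Optional: inspect the CA certificate
openssl x509 -in ca.crt -noout -subject -dates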
# Import the CA certificate into the server truststore
keytool -keystore server.truststore.jks -alias kafka63.ccc.cn -import -file ca.crt -storepass 123456
# Import the CA certificate into the client truststore
keytool -keystore client.truststore.jks -alias kafka63.ccc.cn -import -file ca.crt -storepass 123456
# Generate certificate signing requests from the server and client keystores
keytool -keystore server.keystore.jks -alias kafka61.aaa.cn -certreq -file cert-file -storepass 123456
keytool -keystore client.keystore.jks -alias kafka61.aaa.cn -certreq -file client-cert-file -storepass 123456
# Sign the server and client certificate requests with the CA
openssl x509 -req -CA ca.crt -CAkey ca.key -in cert-file -out cert-signed -days 365 -CAcreateserial -passin pass:123456
openssl x509 -req -CA ca.crt -CAkey ca.key -in client-cert-file -out client-cert-signed -days 365 -CAcreateserial -passin pass:123456
# Import the CA certificate into the server and client keystores
keytool -keystore server.keystore.jks -alias kafka63.ccc.cn -import -file ca.crt -storepass 123456
keytool -keystore client.keystore.jks -alias kafka63.ccc.cn -import -file ca.crt -storepass 123456
# Import the signed server and client certificates into their keystores
keytool -keystore server.keystore.jks -alias kafka61.aaa.cn -import -file cert-signed -storepass 123456
keytool -keystore client.keystore.jks -alias kafka61.aaa.cn -import -file client-cert-signed -storepass 123456
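Before copying the keystores back, it may help to confirm that the signed certificates really chain to the CA (a minimal check, run in the same /root/ssl directory):
# Optional: verify the signed certificates against the CA
openssl verify -CAfile ca.crt cert-signed
openssl verify -CAfile ca.crt client-cert-signed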
# Upload the keystores back to the Kafka host
scp *.jks kafka61.aaa.cn:/home/kafka/kafka/
Configure the Kafka server
cat config/zookeeper.properties
dataDir=/home/kafka/kafka/zk/data
dataLogDir=/home/kafka/kafka/zk/logs
cat config/server.properties
listeners=PLAINTEXT://kafka61.aaa.cn:9092,SSL://kafka61.aaa.cn:9093
advertised.listeners=PLAINTEXT://kafka61.aaa.cn:9092,SSL://kafka61.aaa.cn:9093
ssl.keystore.location=/home/kafka/kafka/server.keystore.jks
ssl.keystore.password=123456
ssl.key.password=123456
ssl.truststore.location=/home/kafka/kafka/server.truststore.jks
ssl.truststore.password=123456
security.inter.broker.protocol=SSL
ssl.client.auth=requested
Configure the Kafka client
cat clientssl.conf
security.protocol=SSL
ssl.truststore.location=/home/kafka/kafka/client.truststore.jks
ssl.truststore.password=123456
ssl.keystore.location=/home/kafka/kafka/client.keystore.jks
ssl.keystore.password=123456
ssl.key.password=123456
Start
bin/zookeeper-server-start.sh config/zookeeper.properties
bin/kafka-server-start.sh config/server.properties
Verify SSL
openssl s_client -debug -connect kafka61.aaa.cn:9093 -tls1
Verify return code: 19 (self signed certificate in certificate chain)
The command prints a long dump of certificates and keys; the return code above only means the chain is self-signed and does not affect normal use.
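To make the handshake verify cleanly, point s_client at the CA certificate instead (a sketch; it assumes ca.crt has been copied to the host running the check):
openssl s_client -connect kafka61.aaa.cn:9093 -CAfile ca.crt </dev/null 2>/dev/null | grep "Verify return code"
# Expected output: Verify return code: 0 (ok)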
Create a topic
bin/kafka-topics.sh --zookeeper kafka61.aaa.cn:2181 --create --topic test --partitions 1 --replication-factor 1
Producer
bin/kafka-console-producer.sh --broker-list kafka61.aaa.cn:9093 --topic test --producer.config clientssl.conf
Consumer
bin/kafka-console-consumer.sh --bootstrap-server kafka61.aaa.cn:9093 --topic test --consumer.config clientssl.conf
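A quick end-to-end check over SSL (a sketch; it assumes the topic test exists and clientssl.conf sits in the current directory): pipe one message into the producer, then read it back with a bounded consumer.
echo "hello-ssl" | bin/kafka-console-producer.sh --broker-list kafka61.aaa.cn:9093 --topic test --producer.config clientssl.conf
bin/kafka-console-consumer.sh --bootstrap-server kafka61.aaa.cn:9093 --topic test --from-beginning --timeout-ms 10000 --consumer.config clientssl.conf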
SASL_SSL references:
https://docs.vmware.com/en/VMware-Smart-Assurance/10.1.0/sa-ui-installation-config-guide-10.1.0/GUID-DF659094-60D3-4E1B-8D63-3DE3ED8B0EDF.html
https://www.orchome.com/1946
https://blog.csdn.net/ahzsg1314/article/details/54140909
2. SASL_SSL configuration
ZooKeeper configuration
cat config/kafka_zoo_jaas.conf
ZKServer {
org.apache.kafka.common.security.plain.PlainLoginModule required
username="admin"
password="admin"
user_admin="admin";
};
ZKClient {
org.apache.kafka.common.security.plain.PlainLoginModule required
username="admin"
password="admin";
};
cat config/zookeeper.properties
authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
requireClientAuthScheme=sasl
jaasLoginRenew=3600000
cat bin/zookeeper-server-start.sh
export KAFKA_OPTS=" -Djava.security.auth.login.config=/home/kafka/kafka/config/kafka_zoo_jaas.conf -Dzookeeper.sasl.serverconfig=ZKServer -Dzookeeper.sasl.clientconfig=ZKClient"
Start
bin/zookeeper-server-start.sh config/zookeeper.properties
Kafka server configuration
cat config/kafka_server_jaas.conf
KafkaServer {
org.apache.kafka.common.security.scram.ScramLoginModule required
username="admin"
password="admin"
user_admin="admin"
user_test="test";
};
ZKClient {
org.apache.kafka.common.security.plain.PlainLoginModule required
username="admin"
password="admin";
};
KafkaClient {
org.apache.kafka.common.security.scram.ScramLoginModule required
username="admin"
password="admin";
};
Client {
org.apache.kafka.common.security.scram.ScramLoginModule required
username="admin"
password="admin";
};
cat config/server.properties
listeners=SASL_SSL://kafka61.aaa.cn:9093
advertised.listeners=SASL_SSL://kafka61.aaa.cn:9093
security.inter.broker.protocol=SASL_SSL
sasl.enabled.mechanisms=SCRAM-SHA-512
sasl.mechanism.inter.broker.protocol=SCRAM-SHA-512
ssl.endpoint.identification.algorithm=HTTPS
authorizer.class.name=kafka.security.auth.SimpleAclAuthorizer
allow.everyone.if.no.acl.found=true
delete.topic.enable=true
auto.create.topics.enable=false
ssl.secure.random.implementation=SHA1PRNG
cat bin/kafka-server-start.sh    # note: put the export before the line that actually launches the broker
export KAFKA_OPTS=" -Djava.security.auth.login.config=/home/kafka/kafka/config/kafka_server_jaas.conf -Dzookeeper.sasl.clientconfig=ZKClient "
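To make the placement concrete, here is a minimal sketch of the tail of the edited script (the exec line is the stock one shipped with Kafka 2.2.0; only the export is added):
export KAFKA_OPTS=" -Djava.security.auth.login.config=/home/kafka/kafka/config/kafka_server_jaas.conf -Dzookeeper.sasl.clientconfig=ZKClient "
exec $base_dir/kafka-run-class.sh $EXTRA_ARGS kafka.Kafka "$@"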
Create SCRAM credentials (stored in ZooKeeper)
bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config 'SCRAM-SHA-256=[iterations=8192,password=admin],SCRAM-SHA-512=[password=admin]' --entity-type users --entity-name admin
View the credentials
bin/kafka-configs.sh --zookeeper localhost:2181 --describe --entity-type users --entity-name admin
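The JAAS file above also declares a test user; if clients will authenticate as test, its SCRAM credential has to be created the same way (the password test below is only an assumption for illustration):
bin/kafka-configs.sh --zookeeper localhost:2181 --alter --add-config 'SCRAM-SHA-512=[password=test]' --entity-type users --entity-name test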
Start
bin/kafka-server-start.sh config/server.properties
Kafka producer configuration
cat config/kafka_client_jaas.conf
KafkaClient {
org.apache.kafka.common.security.scram.ScramLoginModule required
username="admin"
password="admin";
};
Client {
org.apache.kafka.common.security.scram.ScramLoginModule required
username="admin"
password="admin";
};
cat config/producer.properties
security.protocol=SASL_SSL
sasl.mechanism=SCRAM-SHA-512
cat clientssl.conf
security.protocol=SASL_SSL
sasl.mechanism=SCRAM-SHA-512
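Because SASL_SSL still runs over TLS, clientssl.conf also needs the truststore settings from section 1 (a sketch, reusing the paths assumed above):
ssl.truststore.location=/home/kafka/kafka/client.truststore.jks
ssl.truststore.password=123456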
cat bin/kafka-console-producer.sh
export KAFKA_OPTS=" -Djava.security.auth.login.config=/home/kafka/kafka/config/kafka_client_jaas.conf"
Start
bin/kafka-console-producer.sh --broker-list kafka61.aaa.cn:9093 --topic test --producer.config clientssl.conf
Kafka consumer configuration
cat config/consumer.properties
security.protocol=SASL_SSL
sasl.mechanism=SCRAM-SHA-512
#sasl.jaas.config=org.apache.kafka.common.security.scram.ScramLoginModule required username="admin" password="admin";
cat bin/kafka-console-consumer.sh
export KAFKA_OPTS=" -Djava.security.auth.login.config=/home/kafka/kafka/config/kafka_client_jaas.conf"
Grant ACLs (stored in ZooKeeper)
bin/kafka-acls.sh --authorizer-properties zookeeper.connect=localhost:2181 --allow-principal User:admin --consumer --topic test --group '*' --add
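The command above only grants consume rights to admin; a produce ACL (shown here for the test user defined earlier, purely as an illustration) is added the same way:
bin/kafka-acls.sh --authorizer-properties zookeeper.connect=localhost:2181 --allow-principal User:test --producer --topic test --add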
List the ACLs
bin/kafka-acls.sh --list --authorizer-properties zookeeper.connect=localhost:2181
Start
bin/kafka-console-consumer.sh --bootstrap-server kafka61.aaa.cn:9093 --topic test --consumer.config clientssl.conf