Put an end to the 0.10.x (#25)
gnuhpc authored May 5, 2019
2 parents d9fec3e + f6e9aa5 commit 87dd373
Showing 58 changed files with 12,385 additions and 6,847 deletions.
9 changes: 9 additions & 0 deletions .travis.yml
@@ -0,0 +1,9 @@
language: java
jdk: oraclejdk8
install:
- mvn -N io.takari:maven:wrapper
- ./mvnw install -DskipTests=true -Dmaven.javadoc.skip=true -B -V
script:
- echo "skipping tests"
before_install:
- chmod +x mvnw
184 changes: 184 additions & 0 deletions JMXFilterTemplate/KafkaBrokerFilter.yml
@@ -0,0 +1,184 @@
filters:
  #
  # Kafka volume metrics.
  # Notice: BytesInPerSec, BytesOutPerSec and MessagesInPerSec are performance metrics.
  #
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec
      attribute:
        Count:
          metric_type: KAFKA_BROKER_VOLUMN
          alias: BytesIn
        OneMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: BytesInPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=BytesOutPerSec
      attribute:
        Count:
          metric_type: KAFKA_BROKER_VOLUMN
          alias: BytesOut
        OneMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: BytesOutPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec
      attribute:
        Count:
          metric_type: KAFKA_BROKER_VOLUMN
          alias: MessagesIn
        OneMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: MessagesInPerSec

  #
  # Kafka performance metrics
  #
  - include:
      domain: kafka.network
      bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=Produce
      attribute:
        OneMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: RequestsPerSec_Produce
  - include:
      domain: kafka.network
      bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=FetchConsumer
      attribute:
        OneMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: RequestsPerSec_FetchConsumer
  - include:
      domain: kafka.network
      bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=FetchFollower
      attribute:
        OneMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: RequestsPerSec_FetchFollower
  - include:
      domain: kafka.network
      bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=Produce
      attribute:
        Mean:
          metric_type: KAFKA_BROKER_PERF
          alias: TotalTimeMs_Produce_Mean
        Count:
          metric_type: KAFKA_BROKER_PERF
          alias: TotalTimeMs_Produce_Count
  - include:
      domain: kafka.network
      bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchConsumer
      attribute:
        Mean:
          metric_type: KAFKA_BROKER_PERF
          alias: TotalTimeMs_FetchConsumer_Mean
        Count:
          metric_type: KAFKA_BROKER_PERF
          alias: TotalTimeMs_FetchConsumer_Count
  - include:
      domain: kafka.network
      bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchFollower
      attribute:
        Mean:
          metric_type: KAFKA_BROKER_PERF
          alias: TotalTimeMs_FetchFollower_Mean
        Count:
          metric_type: KAFKA_BROKER_PERF
          alias: TotalTimeMs_FetchFollower_Count
  - include:
      domain: kafka.network
      bean: kafka.network:type=SocketServer,name=NetworkProcessorAvgIdlePercent
      attribute:
        Value:
          metric_type: KAFKA_BROKER_PERF
          alias: NetworkProcessorAvgIdlePercent
  - include:
      domain: kafka.server
      bean: kafka.server:type=KafkaRequestHandlerPool,name=RequestHandlerAvgIdlePercent
      attribute:
        FiveMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: RequestHandlerAvgIdlePercent
  - include:
      domain: kafka.server
      bean: kafka.server:type=ReplicaFetcherManager,name=MaxLag,clientId=Replica
      attribute:
        Value:
          metric_type: KAFKA_BROKER_PERF
          alias: MaxLagBtwFollowerLeader
  #
  # Kafka availability metrics
  #
  - include:
      domain: java.lang
      bean: java.lang:type=Runtime
      attribute:
        Uptime:
          metric_type: KAFKA_BROKER_AVAIL
          alias: JVMUptime
  - include:
      domain: kafka.controller
      bean: kafka.controller:type=KafkaController,name=OfflinePartitionsCount
      attribute:
        Value:
          metric_type: KAFKA_BROKER_AVAIL
          alias: OfflinePartitionsCount
  - include:
      domain: kafka.controller
      bean: kafka.controller:type=KafkaController,name=ActiveControllerCount
      attribute:
        Value:
          metric_type: KAFKA_BROKER_AVAIL
          alias: ActiveControllerCount
  - include:
      domain: kafka.controller
      bean: kafka.controller:type=ControllerStats,name=LeaderElectionRateAndTimeMs
      attribute:
        Count:
          metric_type: KAFKA_BROKER_AVAIL
          alias: LeaderElectionRateAndTimeMs
  - include:
      domain: kafka.controller
      bean: kafka.controller:type=ControllerStats,name=UncleanLeaderElectionsPerSec
      attribute:
        Count:
          metric_type: KAFKA_BROKER_AVAIL
          alias: UncleanLeaderElectionsPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=ReplicaManager,name=UnderReplicatedPartitions
      attribute:
        Value:
          metric_type: KAFKA_BROKER_AVAIL
          alias: UnderReplicatedPartitions
  - include:
      domain: kafka.server
      bean: kafka.server:type=ReplicaManager,name=PartitionCount
      attribute:
        Value:
          metric_type: KAFKA_BROKER_AVAIL
          alias: PartitionCount
  - include:
      domain: kafka.server
      bean: kafka.server:type=ReplicaManager,name=IsrShrinksPerSec
      attribute:
        Count:
          metric_type: KAFKA_BROKER_AVAIL
          alias: IsrShrinksPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=ReplicaManager,name=IsrExpandsPerSec
      attribute:
        Count:
          metric_type: KAFKA_BROKER_AVAIL
          alias: IsrExpandsPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=ReplicaManager,name=LeaderCount
      attribute:
        Value:
          metric_type: KAFKA_BROKER_AVAIL
          alias: LeaderCount
43 changes: 43 additions & 0 deletions JMXFilterTemplate/KafkaConsumerFilter.yml
@@ -0,0 +1,43 @@
filters:
  - include:
      domain: kafka.consumer
      bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MaxLag,clientId=([-.\w]+)
      attribute:
        Value:
          metric_type: KAFKA_CONSUMER_OLD_HIGH
          alias: MaxLag
  - include:
      domain: kafka.consumer
      bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MinFetchRate,clientId=([-.\w]+)
      attribute:
        Value:
          metric_type: KAFKA_CONSUMER_OLD_HIGH
          alias: MinFetchRate
  - include:
      domain: kafka.consumer
      bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=MessagesPerSec,clientId=([-.\w]+)
      attribute:
        Count:
          metric_type: KAFKA_CONSUMER_OLD_HIGH
          alias: MessagesPerSec
  - include:
      domain: kafka.consumer
      bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=BytesPerSec,clientId=([-.\w]+)
      attribute:
        Count:
          metric_type: KAFKA_CONSUMER_OLD_HIGH
          alias: BytesPerSec
  - include:
      domain: kafka.consumer
      bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=KafkaCommitsPerSec,clientId=([-.\w]+)
      attribute:
        Count:
          metric_type: KAFKA_CONSUMER_OLD_HIGH
          alias: KafkaCommitsPerSec
  - include:
      domain: kafka.consumer
      bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=OwnedPartitionsCount,clientId=([-.\w]+),groupId=([-.\w]+)
      attribute:
        Value:
          metric_type: KAFKA_CONSUMER_OLD_HIGH
          alias: OwnedPartitionsCount
20 changes: 20 additions & 0 deletions JMXFilterTemplate/KafkaProducerFilter.yml
@@ -0,0 +1,20 @@
filters:
  - include:
      domain: kafka.producer
      bean_regex: kafka.producer:type=producer-metrics,client-id=([-.\w]+)
      attribute:
        request-rate:
          metric_type: KAFKA_PRODUCER
          alias: request-rate
        request-size-avg:
          metric_type: KAFKA_PRODUCER
          alias: request-size-avg
  # Known issue: producer-topic-metrics could not be found in Kafka 0.10.
  - include:
      domain: kafka.producer
      bean_regex: kafka.producer:type=producer-topic-metrics,client-id=([-.\w]+)
      attribute:
        byte-rate:
          metric_type: KAFKA_PRODUCER
          alias: byte-rate-topic

57 changes: 57 additions & 0 deletions JMXFilterTemplate/KafkaTopicFilter.yml
@@ -0,0 +1,57 @@
filters:
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: BytesInPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=BytesOutPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: BytesOutPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: MessagesInPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=BytesRejectedPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: BytesRejectedPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=FailedFetchRequestsPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: FailedFetchRequestsPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=FailedProduceRequestsPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: FailedProduceRequestsPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=TotalFetchRequestsPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: TotalFetchRequestsPerSec
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=TotalProduceRequestsPerSec,topic={topicname}
      attribute:
        Count:
          metric_type: KAFKA_TOPIC_PERF
          alias: TotalProduceRequestsPerSec
43 changes: 42 additions & 1 deletion README.adoc
@@ -1,5 +1,7 @@
= Kafka REST API

image:https://travis-ci.org/gnuhpc/Kafka-zk-restapi.svg?branch=master["Build Status", link="https://travis-ci.org/gnuhpc/Kafka-zk-restapi"]

[[_overview]]
== Overview
Kafka/ZK REST API provides production-ready endpoints to perform administration and metric tasks for Kafka and Zookeeper.
@@ -13,6 +15,9 @@ Kafka/ZK REST API is to provide the production-ready endpoints to perform some a
* Consumer group (old Zookeeper-based / new Kafka-based) list/describe
* Offset check/reset
* Consumer Group Lag check
* Collect JMX metrics from brokers that expose JMX metrics +
For more details, refer to https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/JMXCollector.adoc[JMXCollector API Specification]
* Secure the REST API with Spring Security
// end::base-t[]

image::https://raw.githubusercontent.com/gnuhpc/Kafka-zk-restapi/master/pics/ShowApi.png[API]
@@ -36,14 +41,48 @@ Change the following settings of application-home.yml in src/main/resources to v
kafka.brokers
zookeeper.uris
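
For reference, a minimal sketch of what these two settings might look like in application-home.yml; the nesting and the broker/Zookeeper endpoints below are placeholder assumptions, not values shipped with the project:

[source,yaml]
----
kafka:
  brokers: localhost:9092     # assumed example; list your own broker addresses
zookeeper:
  uris: localhost:2181        # assumed example; use your own Zookeeper connect string
----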

Change the security-related parameters in the application config file:
See below: <<_security,'How to configure security'>>

server.security.check
server.security.checkInitDelay
server.security.checkSecurityInterval

If you want to use the JMX query filter function, you can add your own custom filter files to the JMXFilterTemplate directory in the project root folder, as shown in the sketch below.
For more details, refer to https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/JMXCollector.adoc[JMXCollector API Specification]
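
As an illustration, a minimal custom filter file might look like the following sketch; it simply reuses one bean and alias from the bundled KafkaBrokerFilter.yml template, and other bean/attribute combinations follow the same layout:

[source,yaml]
----
filters:
  - include:
      domain: kafka.server
      bean: kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec
      attribute:
        OneMinuteRate:
          metric_type: KAFKA_BROKER_PERF
          alias: BytesInPerSec
----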

#mvn clean package -Dmaven.test.skip=true+

You will find the zip/tar package under the directory Kafka-zk-restapi/target

You can get it running by unzipping/untarring the distribution package and running bin/start.sh

[[_security]]
=== Security
Public REST services without access control put sensitive data at risk. Therefore we provide a simple authentication mechanism using Spring Security.
To keep the project lightweight, we use a yml file to store user information instead of a database.

Follow these steps to enable the security feature:

Step 1: Modify the application config file and set server.security.check to true (a config sketch follows the parameter list below). +

* server.security.check:
** True: Enable security for the API. Clients can access the API, including the Swagger UI (http://127.0.0.1:8121/api), only with a valid username and password stored in security.yml.
** False: All the endpoints can be accessed without authentication.
* server.security.checkInitDelay: The initial delay, in seconds, before the timer thread first checks the security file.
* server.security.checkSecurityInterval: The interval, in seconds, at which the timer thread checks the security file.
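
For reference, a sketch of how these three properties might look in the application config file; the delay and interval values below are illustrative assumptions, not recommended defaults:

[source,yaml]
----
server:
  security:
    check: true                  # enable authentication for the REST API
    checkInitDelay: 30           # assumed example: initial delay in seconds
    checkSecurityInterval: 60    # assumed example: check interval in seconds
----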

Step 2: Make sure security/security.yml exists in the application root folder.

Step 3: Use the user-controller API to add users to the security file security/security.yml. +
**Notice**:

* The first user should be added manually. Passwords need to be encoded with bcrypt before being saved to the yml file. For convenience, we provide CommonUtils to encode the password.
* There is no need to restart the server after adding a new user or updating user info; the timer thread configured in Step 1 will refresh the user list according to your settings.

=== Supported Kafka Version Information
Currently, this rest api (master branch) supports Kafka 0.10.x brokers. The master branch is the most active branch.
Currently, this REST API (master branch) supports Kafka 0.10.x brokers. The master branch is the most active branch. We're going to get down to the work of supporting Kafka 1.x.

*For 0.11.x, please check out the 0.11.x branch with the following command:*

@@ -63,6 +102,8 @@ You can access Swagger-UI by accessing http://127.0.0.1:8121/api

* kafka-controller : Kafka Api
* zookeeper-controller : Zookeeper Api
* collector-controller : JMX Metric Collector Api
* user-controller : User management Api


=== https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/definitions.adoc[Data Model Definitions for 0.10]
1 change: 1 addition & 0 deletions _config.yml
@@ -0,0 +1 @@
theme: jekyll-theme-cayman