diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 0000000..97b3ea2 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,9 @@ +language: java +jdk: oraclejdk8 +install: + - mvn -N io.takari:maven:wrapper + - ./mvnw install -DskipTests=true -Dmaven.javadoc.skip=true -B -V +script: +- echo "skipping tests" +before_install: + - chmod +x mvnw diff --git a/JMXFilterTemplate/KafkaBrokerFilter.yml b/JMXFilterTemplate/KafkaBrokerFilter.yml new file mode 100644 index 0000000..aff723b --- /dev/null +++ b/JMXFilterTemplate/KafkaBrokerFilter.yml @@ -0,0 +1,184 @@ +filters: + # + # Kafka volume metrics. + # Notice: BytesInPerSec, BytesOutPerSec and MessagesInPerSec are performance metrics. + # + - include: + domain: kafka.server + bean: kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec + attribute: + Count: + metric_type: KAFKA_BROKER_VOLUMN + alias: BytesIn + OneMinuteRate: + metric_type: KAFKA_BROKER_PERF + alias: BytesInPerSec + - include: + domain: kafka.server + bean: kafka.server:type=BrokerTopicMetrics,name=BytesOutPerSec + attribute: + Count: + metric_type: KAFKA_BROKER_VOLUMN + alias: BytesOut + OneMinuteRate: + metric_type: KAFKA_BROKER_PERF + alias: BytesOutPerSec + - include: + domain: kafka.server + bean: kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec + attribute: + Count: + metric_type: KAFKA_BROKER_VOLUMN + alias: MessagesIn + OneMinuteRate: + metric_type: KAFKA_BROKER_PERF + alias: MessagesInPerSec + + # + # Kafka performance metrics + # + - include: + domain: kafka.network + bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=Produce + attribute: + OneMinuteRate: + metric_type: KAFKA_BROKER_PERF + alias: RequestsPerSec_Produce + - include: + domain: kafka.network + bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=FetchConsumer + attribute: + OneMinuteRate: + metric_type: KAFKA_BROKER_PERF + alias: RequestsPerSec_FetchConsumer + - include: + domain: kafka.network + bean: kafka.network:type=RequestMetrics,name=RequestsPerSec,request=FetchFollower + attribute: + OneMinuteRate: + metric_type: KAFKA_BROKER_PERF + alias: RequestsPerSec_FetchFollower + - include: + domain: kafka.network + bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=Produce + attribute: + Mean: + metric_type: KAFKA_BROKER_PERF + alias: TotalTimeMs_Produce_Mean + Count: + metric_type: KAFKA_BROKER_PERF + alias: TotalTimeMs_Produce_Count + - include: + domain: kafka.network + bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchConsumer + attribute: + Mean: + metric_type: KAFKA_BROKER_PERF + alias: TotalTimeMs_FetchConsumer_Mean + Count: + metric_type: KAFKA_BROKER_PERF + alias: TotalTimeMs_FetchConsumer_Count + - include: + domain: kafka.network + bean: kafka.network:type=RequestMetrics,name=TotalTimeMs,request=FetchFollower + attribute: + Mean: + metric_type: KAFKA_BROKER_PERF + alias: TotalTimeMs_FetchFollower_Mean + Count: + metric_type: KAFKA_BROKER_PERF + alias: TotalTimeMs_FetchFollower_Count + - include: + domain: kafka.network + bean: kafka.network:type=SocketServer,name=NetworkProcessorAvgIdlePercent + attribute: + Value: + metric_type: KAFKA_BROKER_PERF + alias: NetworkProcessorAvgIdlePercent + - include: + domain: kafka.server + bean: kafka.server:type=KafkaRequestHandlerPool,name=RequestHandlerAvgIdlePercent + attribute: + FiveMinuteRate: + metric_type: KAFKA_BROKER_PERF + alias: RequestHandlerAvgIdlePercent + - include: + domain: kafka.server + bean: kafka.server:type=ReplicaFetcherManager,name=MaxLag,clientId=Replica + 
attribute: + Value: + metric_type: KAFKA_BROKER_PERF + alias: MaxLagBtwFollowerLeader + # + # Kafka availability metrics + # + - include: + domain: java.lang + bean: java.lang:type=Runtime + attribute: + Uptime: + metric_type: KAFKA_BROKER_AVAIL + alias: JVMUptime + - include: + domain: kafka.controller + bean: kafka.controller:type=KafkaController,name=OfflinePartitionsCount + attribute: + Value: + metric_type: KAFKA_BROKER_AVAIL + alias: OfflinePartitionsCount + - include: + domain: kafka.controller + bean: kafka.controller:type=KafkaController,name=ActiveControllerCount + attribute: + Value: + metric_type: KAFKA_BROKER_AVAIL + alias: ActiveControllerCount + - include: + domain: kafka.controller + bean: kafka.controller:type=ControllerStats,name=LeaderElectionRateAndTimeMs + attribute: + Count: + metric_type: KAFKA_BROKER_AVAIL + alias: LeaderElectionRateAndTimeMs + - include: + domain: kafka.controller + bean: kafka.controller:type=ControllerStats,name=UncleanLeaderElectionsPerSec + attribute: + Count: + metric_type: KAFKA_BROKER_AVAIL + alias: UncleanLeaderElectionsPerSec + - include: + domain: kafka.server + bean: kafka.server:type=ReplicaManager,name=UnderReplicatedPartitions + attribute: + Value: + metric_type: KAFKA_BROKER_AVAIL + alias: UnderReplicatedPartitions + - include: + domain: kafka.server + bean: kafka.server:type=ReplicaManager,name=PartitionCount + attribute: + Value: + metric_type: KAFKA_BROKER_AVAIL + alias: PartitionCount + - include: + domain: kafka.server + bean: kafka.server:type=ReplicaManager,name=IsrShrinksPerSec + attribute: + Count: + metric_type: KAFKA_BROKER_AVAIL + alias: IsrShrinksPerSec + - include: + domain: kafka.server + bean: kafka.server:type=ReplicaManager,name=IsrExpandsPerSec + attribute: + Count: + metric_type: KAFKA_BROKER_AVAIL + alias: IsrExpandsPerSec + - include: + domain: kafka.server + bean: kafka.server:type=ReplicaManager,name=LeaderCount + attribute: + Value: + metric_type: KAFKA_BROKER_AVAIL + alias: LeaderCount diff --git a/JMXFilterTemplate/KafkaConsumerFilter.yml b/JMXFilterTemplate/KafkaConsumerFilter.yml new file mode 100644 index 0000000..094ef7e --- /dev/null +++ b/JMXFilterTemplate/KafkaConsumerFilter.yml @@ -0,0 +1,43 @@ +filters: + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MaxLag,clientId=([-.\w]+) + attribute: + Value: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: MaxLag + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MinFetchRate,clientId=([-.\w]+) + attribute: + Value: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: MinFetchRate + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=MessagesPerSec,clientId=([-.\w]+) + attribute: + Count: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: MessagesPerSec + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=BytesPerSec,clientId=([-.\w]+) + attribute: + Count: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: BytesPerSec + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=KafkaCommitsPerSec,clientId=([-.\w]+) + attribute: + Count: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: KafkaCommitsPerSec + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=OwnedPartitionsCount,clientId=([-.\w]+),groupId=([-.\w]+) + attribute: + Value: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: 
OwnedPartitionsCount
\ No newline at end of file
diff --git a/JMXFilterTemplate/KafkaProducerFilter.yml b/JMXFilterTemplate/KafkaProducerFilter.yml
new file mode 100644
index 0000000..2d5de8a
--- /dev/null
+++ b/JMXFilterTemplate/KafkaProducerFilter.yml
@@ -0,0 +1,20 @@
+filters:
+  - include:
+      domain: kafka.producer
+      bean_regex: kafka.producer:type=producer-metrics,client-id=([-.\w]+)
+      attribute:
+        request-rate:
+          metric_type: KAFKA_PRODUCER
+          alias: request-rate
+        request-size-avg:
+          metric_type: KAFKA_PRODUCER
+          alias: request-size-avg
+  # Note: the producer-topic-metrics MBean could not be found in Kafka 0.10
+  - include:
+      domain: kafka.producer
+      bean_regex: kafka.producer:type=producer-topic-metrics,client-id=([-.\w]+)
+      attribute:
+        byte-rate:
+          metric_type: KAFKA_PRODUCER
+          alias: byte-rate-topic
+
diff --git a/JMXFilterTemplate/KafkaTopicFilter.yml b/JMXFilterTemplate/KafkaTopicFilter.yml
new file mode 100644
index 0000000..361f827
--- /dev/null
+++ b/JMXFilterTemplate/KafkaTopicFilter.yml
@@ -0,0 +1,57 @@
+filters:
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: BytesInPerSec
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=BytesOutPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: BytesOutPerSec
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=MessagesInPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: MessagesInPerSec
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=BytesRejectedPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: BytesRejectedPerSec
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=FailedFetchRequestsPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: FailedFetchRequestsPerSec
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=FailedProduceRequestsPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: FailedProduceRequestsPerSec
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=TotalFetchRequestsPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: TotalFetchRequestsPerSec
+  - include:
+      domain: kafka.server
+      bean: kafka.server:type=BrokerTopicMetrics,name=TotalProduceRequestsPerSec,topic={topicname}
+      attribute:
+        Count:
+          metric_type: KAFKA_TOPIC_PERF
+          alias: TotalProduceRequestsPerSec
\ No newline at end of file
diff --git a/README.adoc b/README.adoc
index a55ecf5..ef78750 100644
--- a/README.adoc
+++ b/README.adoc
@@ -1,5 +1,7 @@
 = Kafka REST API
+image:https://travis-ci.org/gnuhpc/Kafka-zk-restapi.svg?branch=master["Build Status", link="https://travis-ci.org/gnuhpc/Kafka-zk-restapi"]
+
 [[_overview]]
 == Overview
 Kafka/ZK REST API provides production-ready endpoints to perform administration and metric tasks for Kafka and Zookeeper.
@@ -13,6 +15,9 @@ Kafka/ZK REST API is to provide the production-ready endpoints to perform some a
 * Consumer group(old zookeeper based/new kafka based) list/describe
 * Offset check/reset
 * Consumer Group Lag check
+* Collect JMX metrics from brokers that expose JMX metrics
+
+  For more details, refer to https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/JMXCollector.adoc[JMXCollector API Specification]
+* Secure the REST API with Spring Security
 // end::base-t[]
 image::https://raw.githubusercontent.com/gnuhpc/Kafka-zk-restapi/master/pics/ShowApi.png[API]
@@ -36,14 +41,48 @@ Change the following settings of application-home.yml in src/main/resources to v
 kafka.brokers
 zookeeper.uris
+Change the security-related parameters in the application config file:
+See below: <<_security,'How to configure security'>>
+
+server.security.check
+server.security.checkInitDelay
+server.security.checkSecurityInterval
+
+If you want to use the JMX Query Filter function, you can add your own custom filter files to the JMXFilterTemplate directory in the project root folder.
+For more details, refer to https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/JMXCollector.adoc[JMXCollector API Specification]
+
 #mvn clean package -Dmaven.test.skip=true+
 You will find zip/tar under directory: Kafka-zk-restapi/target
 You can get it running by unzip/untaring the distribution package and run bin/start.sh
+[[_security]]
+=== Security
+Public REST services without access control put sensitive data at risk, so we provide a simple authentication mechanism using Spring Security.
+To keep the project lightweight, user information is stored in a yml file rather than in a database.
+
+Follow these steps to enable the security feature:
+
+Step 1: Modify the application config file and set server.security.check to true.
+
+
+* server.security.check:
+  ** True: Security is enabled for the API. Clients can access the API only with a valid username and password stored in security.yml; without credentials, only the Swagger UI (http://127.0.0.1:8121/api) can be accessed.
+  ** False: All the endpoints can be accessed without authentication.
+* server.security.checkInitDelay: The initial delay, in seconds, before the timing thread first checks the security file.
+* server.security.checkSecurityInterval: The interval, in seconds, at which the timing thread re-checks the security file.
+
+Step 2: Make sure security/security.yml exists in the application root folder.
+
+Step 3: Use the user controller API to add users to the security file security/security.yml.
+
+**Notice**:
+
+* The first user should be added manually. The password needs to be encoded with bcrypt before being saved to the yml file. For convenience, we provide CommonUtils to encode the password (see the sketch after this list).
+* There is no need to restart the server after adding a new user or updating user info. The timing thread introduced in Step 1 refreshes the user list according to your settings.
+
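For the first, manually added user, the bcrypt hash can be produced with Spring Security's BCryptPasswordEncoder. A minimal sketch (the class below is illustrative; the project's CommonUtils may wrap this differently):

[source, java]
----
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;

// Hypothetical helper: encode a plain-text password with bcrypt so the
// resulting hash can be pasted into security/security.yml.
public class PasswordEncoderSketch {
  public static void main(String[] args) {
    BCryptPasswordEncoder encoder = new BCryptPasswordEncoder();
    // bcrypt generates a fresh salt on every call, so the hash differs on
    // each run; any of the hashes will verify against the same password.
    System.out.println(encoder.encode(args[0]));
  }
}
----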
 === Support Kafka Version Information
-Currently, this rest api (master branch) supports Kafka 0.10.x brokers. The master branch is the most active branch.
+Currently, this REST API (master branch) supports Kafka 0.10.x brokers. The master branch is the most active branch. Support for Kafka 1.x is planned.
 *For 0.11.x, please checkout the branch 0.11.x by calling the command:*
@@ -63,6 +102,8 @@ You can access Swagger-UI by accessing http://127.0.0.1:8121/api
 * kafka-controller : Kafka Api
 * zookeeper-controller : Zookeeper Api
+* collector-controller : JMX Metric Collector Api
+* user-controller : User management Api
 === https://github.com/gnuhpc/Kafka-zk-restapi/blob/master/docs/definitions.adoc[Data Model Definitions for 0.10]
diff --git a/_config.yml b/_config.yml
new file mode 100644
index 0000000..c419263
--- /dev/null
+++ b/_config.yml
@@ -0,0 +1 @@
+theme: jekyll-theme-cayman
\ No newline at end of file
diff --git a/docs/JMXCollector.adoc b/docs/JMXCollector.adoc
new file mode 100644
index 0000000..0d57fcb
--- /dev/null
+++ b/docs/JMXCollector.adoc
@@ -0,0 +1,360 @@
+= JMX Collector REST API
+
+== Overview
+The JMX Collector REST API provides two APIs for collecting JMX metrics from applications that expose them.
+
+* V1: Collects all the JMX metric data.
+
+* V2: A higher-level API that collects JMX metric data via query filters. You can specify which metrics to include or exclude.
+
+== V1 API
+
+=== How to visit V1 API
+Visit the service through HTTP GET, providing a String parameter named "jmxurl", as follows:
+[source, html]
+----
+http://localhost:8121/jmx/v1?jmxurl=127.0.0.1:19999,127.0.0.1:29999
+----
+*Notice:* The parameter "jmxurl" should be a comma-separated list of {IP:Port} entries, or set to 'default'. The list should match the following regex. If set to default, the value of the variable "jmx.kafka.jmxurl" defined in the application config file is used.
+[source, java]
+----
+  private static final String IP_AND_PORT_LIST_REGEX = "(([0-9]+(?:\\.[0-9]+){3}:[0-9]+,)*([0-9]+(?:\\.[0-9]+){3}:[0-9]+)+)|(default)";
+----
+
+=== V1 API JSON Format Response
+The response from the service is a list of objects in JSON format. Each JSON object includes the following fields:
+
+* host: The "host" field is composed of the IP and the exposed JMX port.
+* timestamp: The time of collection. For easier reading, the "timestamp" field is transformed to the format "yyyy-MM-dd HH:mm:ss".
+* collected: True if the collection succeeded, false otherwise.
+* mbeanInfo: The JMX metric data, a dictionary whose keys are JMX bean names and whose values are attribute-info dictionaries. mbeanInfo is empty when "collected" is false.
+* msg: The error message produced while collecting the JMX metrics, if any.
+
+==== Sample Response for success
+[source, json]
+----
+[
+  {
+    "host": "127.0.0.1:19999",
+    "timestamp": "2018-04-10 00:13:16",
+    "collected": true,
+    "mbeanInfo": {
+      "kafka.network:type=RequestMetrics,name=ResponseQueueTimeMs,request=FetchFollower": {
+        "75thPercentile": "0.0",
+        "Mean": "0.2777777777777778",
+        "StdDev": "0.7911877721292356",
+        "98thPercentile": "3.69999999999996",
+        "Min": "0.0",
+        "99thPercentile": "6.0",
+        "95thPercentile": "1.0",
+        "Max": "6.0",
+        "999thPercentile": "6.0",
+        "Count": "72",
+        "50thPercentile": "0.0"
+      },
+      "kafka.server:type=ReplicaFetcherManager,name=MinFetchRate,clientId=Replica": {
+        "Value": "1.8566937378852422"
+      }
+      ...
+    }
+  },
+  {
+    "host": "127.0.0.1:29999",
+    "timestamp": "2018-04-10 00:14:16",
+    "collected": true,
+    "mbeanInfo": {
+      ...
+    }
+  }
+]
+----
+==== Sample Response for failure
+[source, json]
+----
+[
+  {
+    "host": "127.0.0.1:19999",
+    "timestamp": "2018-04-10 14:18:28",
+    "collected": false,
+    "mbeanInfo": {},
+    "msg": "org.gnuhpc.bigdata.exception.CollectorException occurred. URL: service:jmx:rmi:///jndi/rmi://127.0.0.1:19999/jmxrmi. Reason: java.rmi.ConnectException: Connection refused to host: 192.168.1.106; nested exception is: \n\tjava.net.ConnectException: Operation timed out"
+  },
+  {
+    "host": "127.0.0.1:29999",
+    "timestamp": "2018-04-10 14:21:06",
+    "collected": false,
+    "mbeanInfo": {},
+    "msg": "org.gnuhpc.bigdata.exception.CollectorException occurred. URL: service:jmx:rmi:///jndi/rmi://127.0.0.1:29999/jmxrmi. Reason: java.rmi.ConnectException: Connection refused to host: 192.168.1.106; nested exception is: \n\tjava.net.ConnectException: Operation timed out"
+  }
+]
+----
+
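Under the hood, a V1 collection like the one above amounts to opening a JMX RMI connection and walking the MBeans. A minimal sketch with the standard javax.management API (error handling simplified; this is illustrative code, not the project's actual collector):

[source, java]
----
import java.util.HashMap;
import java.util.Map;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanServerConnection;
import javax.management.ObjectName;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

public class JmxDumpSketch {
  public static void main(String[] args) throws Exception {
    // "127.0.0.1:19999" is one {IP:Port} entry from the jmxurl parameter.
    JMXServiceURL url =
        new JMXServiceURL("service:jmx:rmi:///jndi/rmi://127.0.0.1:19999/jmxrmi");
    try (JMXConnector connector = JMXConnectorFactory.connect(url)) {
      MBeanServerConnection mbsc = connector.getMBeanServerConnection();
      // Walk every registered MBean and read each attribute, mirroring the
      // structure of the "mbeanInfo" field in the V1 response.
      for (ObjectName name : mbsc.queryNames(null, null)) {
        Map<String, Object> attributes = new HashMap<>();
        for (MBeanAttributeInfo attr : mbsc.getMBeanInfo(name).getAttributes()) {
          try {
            attributes.put(attr.getName(), mbsc.getAttribute(name, attr.getName()));
          } catch (Exception e) {
            // Some attributes are not readable; skip them.
          }
        }
        System.out.println(name + " -> " + attributes);
      }
    }
  }
}
----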
+== V2 API
+If you want to collect only some of the metrics rather than all of them, use the V2 API.
+
+=== How to visit V2 API:/jmx/v2
+Visit the service through HTTP POST, providing a String parameter named "jmxurl" and putting the JSON query filter into the RequestBody, as follows:
+[source, html]
+----
+http://localhost:8121/jmx/v2?jmxurl=127.0.0.1:19999,127.0.0.1:29999
+RequestBody:
+{
+  "filters":[
+    {
+      "include":{
+        "domain":"kafka.server",
+        "bean":["kafka.server:type=BrokerTopicMetrics,name=TotalProduceRequestsPerSec"],
+        "attribute":["OneMinuteRate", "FiveMinuteRate"]
+      },
+      "exclude":{
+
+      }
+    }
+  ]
+}
+----
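The same request can also be issued programmatically. A minimal JDK 8 sketch using java.net.HttpURLConnection (the endpoint and filter body are taken from the example above; this is illustrative client code, not part of the project):

[source, java]
----
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

public class JmxV2ClientSketch {
  public static void main(String[] args) throws Exception {
    // Query filter: include two rate attributes of one broker-topic MBean.
    String body = "{\"filters\":[{\"include\":{\"domain\":\"kafka.server\","
        + "\"bean\":[\"kafka.server:type=BrokerTopicMetrics,name=TotalProduceRequestsPerSec\"],"
        + "\"attribute\":[\"OneMinuteRate\",\"FiveMinuteRate\"]}}]}";
    URL url = new URL("http://localhost:8121/jmx/v2?jmxurl=127.0.0.1:19999,127.0.0.1:29999");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    conn.setRequestProperty("Content-Type", "application/json");
    conn.setDoOutput(true);
    try (OutputStream os = conn.getOutputStream()) {
      os.write(body.getBytes(StandardCharsets.UTF_8));
    }
    // Print the JSON response described in the next subsection.
    try (InputStream is = conn.getInputStream();
        Scanner scanner = new Scanner(is, StandardCharsets.UTF_8.name()).useDelimiter("\\A")) {
      System.out.println(scanner.hasNext() ? scanner.next() : "");
    }
  }
}
----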
+==== Instruction: Query Filter
+A query filter defines the query conditions. The field "filters" is a list of parallel query configurations.
+Only 2 keys are allowed in each query configuration:
+
+* include (mandatory): Dictionary of JMX filters. Any attribute that matches these filters will be collected unless it also matches the "exclude" filters (see below)
+* exclude (optional): Dictionary of JMX filters. Attributes that match these filters won't be collected
+
+Each include or exclude dictionary supports the following keys:
+
+* domain: a list of domain names (e.g. java.lang)
+* domain_regex: a list of regexes on the domain name (e.g. java\.lang.*)
+* bean or bean_name: a list of full bean names (e.g. java.lang:type=Compilation)
+* bean_regex: a list of regexes on the full bean names (e.g. java\.lang.*[,:]type=Compilation.*)
+* attribute: accepts two types of values: a dictionary whose keys are attribute names, or a list of attribute names
+
+You can freely customize the query conditions, and you can also use the filter templates for convenience (see below for details).
+
+==== Response of V2 API /jmx/v2
+The response from the service is a list of objects in JSON format. Each JSON object includes the following fields:
+
+* host: The "host" field is composed of the IP and the exposed JMX port.
+* timestamp: The time of collection. For easier reading, the "timestamp" field is transformed to the format "yyyy-MM-dd HH:mm:ss".
+* collected: True if the collection succeeded, false otherwise.
+* metrics: The JMX metric data, a list of dictionaries with the following keys:
+
+  ** domain: domain name of the metric
+  ** metric_type: the metric type defined in the "attribute" field of the query filter. The default value is "gauge".
+  ** alias: the metric alias defined in the "attribute" field of the query filter
+  ** beanName: bean name of the metric
+  ** attributeName: attribute name of the metric
+  ** value: metric value
+* msg: The error message produced while collecting the JMX metrics, if any.
+
+Sample response is as follows:
+[source, json]
+----
+[
+  {
+    "host": "127.0.0.1:4444",
+    "timestamp": "2018-04-04 22:40:18",
+    "collected": true,
+    "metrics": [
+      {
+        "domain": "kafka.consumer",
+        "metric_type": "consumer",
+        "alias": "owned_partitions_count",
+        "beanName": "kafka.consumer:clientId=console-consumer-4251,groupId=console-consumer-4251,name=OwnedPartitionsCount,type=ZookeeperConsumerConnector",
+        "attributeName": "Value",
+        "value": 3
+      },
+      {
+        "domain": "kafka.consumer",
+        "metric_type": "consumer",
+        "alias": "messages_per_sec",
+        "beanName": "kafka.consumer:clientId=console-consumer-4251,name=MessagesPerSec,type=ConsumerTopicMetrics",
+        "attributeName": "Count",
+        "value": 0
+      },
+      {
+        "domain": "kafka.consumer",
+        "metric_type": "consumer",
+        "alias": "min_fetch_rate",
+        "beanName": "kafka.consumer:clientId=console-consumer-4251,name=MinFetchRate,type=ConsumerFetcherManager",
+        "attributeName": "Value",
+        "value": 9.7817371514609
+      },
+      {
+        "domain": "kafka.consumer",
+        "metric_type": "consumer",
+        "alias": "kafka_commits_per_sec",
+        "beanName": "kafka.consumer:clientId=console-consumer-4251,name=KafkaCommitsPerSec,type=ZookeeperConsumerConnector",
+        "attributeName": "Count",
+        "value": 0
+      },
+      {
+        "domain": "kafka.consumer",
+        "metric_type": "consumer",
+        "alias": "bytes_per_sec",
+        "beanName": "kafka.consumer:clientId=console-consumer-4251,name=BytesPerSec,type=ConsumerTopicMetrics",
+        "attributeName": "Count",
+        "value": 0
+      },
+      {
+        "domain": "kafka.consumer",
+        "metric_type": "consumer",
+        "alias": "maxlag",
+        "beanName": "kafka.consumer:clientId=console-consumer-4251,name=MaxLag,type=ConsumerFetcherManager",
+        "attributeName": "Value",
+        "value": 0
+      }
+    ],
+    "msg": null
+  }
+]
+----
+
+=== How to visit V2 API:/jmx/v2/filters
+Specific applications have their own JMX metrics, so we provide filter templates such as KafkaBrokerFilter, KafkaConsumerFilter and KafkaProducerFilter.
+
+This API lists the query filter templates matching the filterKey (case-insensitive). If filterKey is empty, all the templates are returned.
+
+[source, html]
+----
+http://localhost:8121/jmx/v2/filters?filterKey=consumer
+----
+
+The response is as follows:
+[source, json]
+----
+{
+  "KafkaConsumerFilter": {
+    "filters": [
+      {
+        "include": {
+          "domain": "kafka.consumer",
+          "bean_regex": "kafka.consumer:type=ConsumerFetcherManager,name=MaxLag,clientId=([-.\\w]+)",
+          "attribute": {
+            "Value": {
+              "metric_type": "KAFKA_CONSUMER_OLD_HIGH",
+              "alias": "MaxLag"
+            }
+          }
+        }
+      },
+      {
+        "include": {
+          "domain": "kafka.consumer",
+          "bean_regex": "kafka.consumer:type=ConsumerFetcherManager,name=MinFetchRate,clientId=([-.\\w]+)",
+          "attribute": {
+            "Value": {
+              "metric_type": "KAFKA_CONSUMER_OLD_HIGH",
+              "alias": "MinFetchRate"
+            }
+          }
+        }
+      },
+      {
+        "include": {
+          "domain": "kafka.consumer",
+          "bean_regex": "kafka.consumer:type=ConsumerTopicMetrics,name=MessagesPerSec,clientId=([-.\\w]+)",
+          "attribute": {
+            "Count": {
+              "metric_type": "KAFKA_CONSUMER_OLD_HIGH",
+              "alias": "MessagesPerSec"
+            }
+          }
+        }
+      },
+      {
+        "include": {
+          "domain": "kafka.consumer",
+          "bean_regex": "kafka.consumer:type=ConsumerTopicMetrics,name=BytesPerSec,clientId=([-.\\w]+)",
+          "attribute": {
+            "Count": {
+              "metric_type": "KAFKA_CONSUMER_OLD_HIGH",
+              "alias": "BytesPerSec"
+            }
+          }
+        }
+      },
+      {
+        "include": {
+          "domain": "kafka.consumer",
+          "bean_regex": "kafka.consumer:type=ZookeeperConsumerConnector,name=KafkaCommitsPerSec,clientId=([-.\\w]+)",
+          "attribute": {
+            "Count": {
+              "metric_type": "KAFKA_CONSUMER_OLD_HIGH",
+              "alias": "KafkaCommitsPerSec"
+            }
+          }
+        }
+      },
+      {
+        "include": {
+          "domain": "kafka.consumer",
+          "bean_regex": "kafka.consumer:type=ZookeeperConsumerConnector,name=OwnedPartitionsCount,clientId=([-.\\w]+),groupId=([-.\\w]+)",
+          "attribute": {
+            "Value": {
+              "metric_type": "KAFKA_CONSUMER_OLD_HIGH",
+              "alias": "OwnedPartitionsCount"
+            }
+          }
+        }
+      }
+    ]
+  }
+}
+----
+
+==== How to add filter template
+You can add filter template yml files to the JMXFilterTemplate directory in the project root folder. The fields of the file are the same as those of the query filter described above.
+ +Sample filter template is as follows: +[source, yml] +---- +filters: + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MaxLag,clientId=([-.\w]+) + attribute: + Value: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: MaxLag + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerFetcherManager,name=MinFetchRate,clientId=([-.\w]+) + attribute: + Value: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: MinFetchRate + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=MessagesPerSec,clientId=([-.\w]+) + attribute: + Count: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: MessagesPerSec + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ConsumerTopicMetrics,name=BytesPerSec,clientId=([-.\w]+) + attribute: + Count: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: BytesPerSec + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=KafkaCommitsPerSec,clientId=([-.\w]+) + attribute: + Count: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: KafkaCommitsPerSec + - include: + domain: kafka.consumer + bean_regex: kafka.consumer:type=ZookeeperConsumerConnector,name=OwnedPartitionsCount,clientId=([-.\w]+),groupId=([-.\w]+) + attribute: + Value: + metric_type: KAFKA_CONSUMER_OLD_HIGH + alias: OwnedPartitionsCount +---- + + + + + + + diff --git a/docs/definitions.adoc b/docs/definitions.adoc index 973f0a9..8369b1e 100644 --- a/docs/definitions.adoc +++ b/docs/definitions.adoc @@ -1,268 +1,399 @@ - -[[_definitions]] -== Definitions - -[[_addpartition]] -=== AddPartition - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**numPartitionsAdded** + -__optional__|integer(int32) -|**replicaAssignment** + -__optional__|string -|**topic** + -__optional__|string -|=== - - -[[_brokerinfo]] -=== BrokerInfo - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**endPoints** + -__optional__|< string > array -|**host** + -__optional__|string -|**id** + -__optional__|integer(int32) -|**jmxPort** + -__optional__|integer(int32) -|**port** + -__optional__|integer(int32) -|**rack** + -__optional__|string -|**securityProtocol** + -__optional__|object -|**startTime** + -__optional__|string(date-time) -|**version** + -__optional__|integer(int32) -|=== - - -[[_consumergroupdesc]] -=== ConsumerGroupDesc - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**consumerId** + -__optional__|string -|**currentOffset** + -__optional__|integer(int64) -|**groupName** + -__optional__|string -|**host** + -__optional__|string -|**lag** + -__optional__|integer(int64) -|**logEndOffset** + -__optional__|integer(int64) -|**partitionId** + -__optional__|integer(int32) -|**state** + -__optional__|enum (RUNNING, PENDING) -|**topic** + -__optional__|string -|**type** + -__optional__|enum (NEW, OLD) -|=== - - -[[_generalresponse]] -=== GeneralResponse - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**msg** + -__optional__|string -|**state** + -__optional__|enum (success, failure) -|=== - - -[[_hostandport]] -=== HostAndPort - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**hostText** + -__optional__|string -|**port** + -__optional__|integer(int32) -|=== - - -[[_bbed2f02db402d6ae09a0dcf86682c45]] -=== Map«int,long» -__Type__ : < string, integer(int64) > map - - -[[_reassignwrapper]] -=== ReassignWrapper - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**brokers** + -__optional__|< integer(int32) > array 
-|**topics** + -__optional__|< string > array -|=== - - -[[_topicandpartition]] -=== TopicAndPartition -__Type__ : object - - -[[_topicbrief]] -=== TopicBrief - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**isrRate** + -__optional__|number(double) -|**numPartition** + -__optional__|integer(int32) -|**topic** + -__optional__|string -|=== - - -[[_topicdetail]] -=== TopicDetail - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**factor** + -__optional__|integer(int32) -|**name** + -__optional__|string -|**partitions** + -__optional__|integer(int32) -|**prop** + -__optional__|< string, object > map -|=== - - -[[_topicmeta]] -=== TopicMeta - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**partitionCount** + -__optional__|integer(int32) -|**replicationFactor** + -__optional__|integer(int32) -|**topicCustomConfigs** + -__optional__|< string, object > map -|**topicName** + -__optional__|string -|**topicPartitionInfos** + -__optional__|< <<_topicpartitioninfo,TopicPartitionInfo>> > array -|=== - - -[[_topicpartitioninfo]] -=== TopicPartitionInfo - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**endOffset** + -__optional__|integer(int64) -|**in_sync** + -__optional__|boolean -|**isr** + -__optional__|< string > array -|**leader** + -__optional__|string -|**messageAvailable** + -__optional__|integer(int64) -|**partitionId** + -__optional__|integer(int32) -|**replicas** + -__optional__|< string > array -|**startOffset** + -__optional__|integer(int64) -|=== - - -[[_zkserverclient]] -=== ZkServerClient - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**host** + -__optional__|string -|**ops** + -__optional__|integer(int32) -|**port** + -__optional__|integer(int32) -|**queued** + -__optional__|integer(int32) -|**received** + -__optional__|integer(int32) -|**sent** + -__optional__|integer(int32) -|=== - - -[[_zkserverenvironment]] -=== ZkServerEnvironment - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**attributes** + -__optional__|< string, string > map -|=== - - -[[_zkserverstat]] -=== ZkServerStat - -[options="header", cols=".^3,.^4"] -|=== -|Name|Schema -|**avgLatency** + -__optional__|integer(int32) -|**buildDate** + -__optional__|string -|**clients** + -__optional__|< <<_zkserverclient,ZkServerClient>> > array -|**connections** + -__optional__|integer(int32) -|**maxLatency** + -__optional__|integer(int32) -|**minLatency** + -__optional__|integer(int32) -|**mode** + -__optional__|enum (Leader, Follower, Observer) -|**nodes** + -__optional__|integer(int32) -|**outstanding** + -__optional__|integer(int32) -|**received** + -__optional__|integer(int32) -|**sent** + -__optional__|integer(int32) -|**version** + -__optional__|string -|**zxId** + -__optional__|string -|=== - - - + +[[_definitions]] +== Definitions + +[[_addpartition]] +=== AddPartition + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**numPartitionsAdded** + +__optional__|integer(int32) +|**replicaAssignment** + +__optional__|string +|**topic** + +__optional__|string +|=== + + +[[_brokerinfo]] +=== BrokerInfo + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**endPoints** + +__optional__|< string > array +|**host** + +__optional__|string +|**id** + +__optional__|integer(int32) +|**jmxPort** + +__optional__|integer(int32) +|**port** + +__optional__|integer(int32) +|**rack** + +__optional__|string +|**securityProtocol** + +__optional__|object +|**startTime** + +__optional__|string(date-time) +|**version** + +__optional__|integer(int32) +|=== + + 
+[[_consumergroupdesc]] +=== ConsumerGroupDesc + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**consumerId** + +__optional__|string +|**currentOffset** + +__optional__|integer(int64) +|**groupName** + +__optional__|string +|**host** + +__optional__|string +|**lag** + +__optional__|integer(int64) +|**logEndOffset** + +__optional__|integer(int64) +|**partitionId** + +__optional__|integer(int32) +|**state** + +__optional__|enum (RUNNING, PENDING) +|**topic** + +__optional__|string +|**type** + +__optional__|enum (NEW, OLD) +|=== + + +[[_generalresponse]] +=== GeneralResponse + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**msg** + +__optional__|string +|**state** + +__optional__|enum (success, failure) +|=== + + +[[_25ace99c7bfccf8d17a8f035a9b4bd2f]] +=== HashMap«string,object» +__Type__ : < string, object > map + + +[[_healthcheckresult]] +=== HealthCheckResult + +[options="header", cols=".^3,.^11,.^4"] +|=== +|Name|Description|Schema +|**msg** + +__optional__||string +|**status** + +__optional__||string +|**timestamp** + +__optional__|**Example** : `"yyyy-MM-dd HH:mm:ss"`|string +|=== + + +[[_hostandport]] +=== HostAndPort + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**hostText** + +__optional__|string +|**port** + +__optional__|integer(int32) +|=== + + +[[_jmxconfiguration]] +=== JMXConfiguration + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**exclude** + +__optional__|<<_jmxfilter,JMXFilter>> +|**include** + +__optional__|<<_jmxfilter,JMXFilter>> +|=== + + +[[_jmxfilter]] +=== JMXFilter + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**attribute** + +__optional__|object +|**beanNames** + +__optional__|< string > array +|**beanRegexes** + +__optional__|< <<_pattern,Pattern>> > array +|**domain** + +__optional__|string +|**domainRegex** + +__optional__|<<_pattern,Pattern>> +|**emptyBeanName** + +__optional__|boolean +|**filter** + +__optional__|< string, object > map +|=== + + +[[_jmxmetricdata]] +=== JMXMetricData + +[options="header", cols=".^3,.^11,.^4"] +|=== +|Name|Description|Schema +|**collected** + +__optional__||boolean +|**host** + +__optional__||string +|**metrics** + +__optional__||< <<_25ace99c7bfccf8d17a8f035a9b4bd2f,HashMap«string,object»>> > array +|**msg** + +__optional__||string +|**timestamp** + +__optional__|**Example** : `"yyyy-MM-dd HH:mm:ss"`|string +|=== + + +[[_jmxmetricdatav1]] +=== JMXMetricDataV1 + +[options="header", cols=".^3,.^11,.^4"] +|=== +|Name|Description|Schema +|**collected** + +__optional__||boolean +|**host** + +__optional__||string +|**mbeanInfo** + +__optional__||object +|**msg** + +__optional__||string +|**timestamp** + +__optional__|**Example** : `"yyyy-MM-dd HH:mm:ss"`|string +|=== + + +[[_jmxquery]] +=== JMXQuery + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**filters** + +__optional__|< <<_jmxconfiguration,JMXConfiguration>> > array +|=== + + +[[_bbed2f02db402d6ae09a0dcf86682c45]] +=== Map«int,long» +__Type__ : < string, integer(int64) > map + + +[[_pattern]] +=== Pattern + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**cursor** + +__optional__|integer(int32) +|=== + + +[[_reassignwrapper]] +=== ReassignWrapper + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**brokers** + +__optional__|< integer(int32) > array +|**topics** + +__optional__|< string > array +|=== + + +[[_topicandpartition]] +=== TopicAndPartition +__Type__ : object + + +[[_topicbrief]] +=== TopicBrief + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**isrRate** + 
+__optional__|number(double) +|**numPartition** + +__optional__|integer(int32) +|**topic** + +__optional__|string +|=== + + +[[_topicdetail]] +=== TopicDetail + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**factor** + +__optional__|integer(int32) +|**name** + +__optional__|string +|**partitions** + +__optional__|integer(int32) +|**prop** + +__optional__|< string, object > map +|=== + + +[[_topicmeta]] +=== TopicMeta + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**partitionCount** + +__optional__|integer(int32) +|**replicationFactor** + +__optional__|integer(int32) +|**topicCustomConfigs** + +__optional__|< string, object > map +|**topicName** + +__optional__|string +|**topicPartitionInfos** + +__optional__|< <<_topicpartitioninfo,TopicPartitionInfo>> > array +|=== + + +[[_topicpartitioninfo]] +=== TopicPartitionInfo + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**endOffset** + +__optional__|integer(int64) +|**in_sync** + +__optional__|boolean +|**isr** + +__optional__|< string > array +|**leader** + +__optional__|string +|**messageAvailable** + +__optional__|integer(int64) +|**partitionId** + +__optional__|integer(int32) +|**replicas** + +__optional__|< string > array +|**startOffset** + +__optional__|integer(int64) +|=== + + +[[_user]] +=== User + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**password** + +__optional__|string +|**role** + +__optional__|string +|**username** + +__optional__|string +|=== + + +[[_zkserverclient]] +=== ZkServerClient + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**host** + +__optional__|string +|**ops** + +__optional__|integer(int32) +|**port** + +__optional__|integer(int32) +|**queued** + +__optional__|integer(int32) +|**received** + +__optional__|integer(int32) +|**sent** + +__optional__|integer(int32) +|=== + + +[[_zkserverenvironment]] +=== ZkServerEnvironment + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**attributes** + +__optional__|< string, string > map +|=== + + +[[_zkserverstat]] +=== ZkServerStat + +[options="header", cols=".^3,.^4"] +|=== +|Name|Schema +|**avgLatency** + +__optional__|integer(int32) +|**buildDate** + +__optional__|string +|**clients** + +__optional__|< <<_zkserverclient,ZkServerClient>> > array +|**connections** + +__optional__|integer(int32) +|**maxLatency** + +__optional__|integer(int32) +|**minLatency** + +__optional__|integer(int32) +|**mode** + +__optional__|enum (Leader, Follower, Observer, Standalone) +|**nodes** + +__optional__|integer(int32) +|**outstanding** + +__optional__|integer(int32) +|**received** + +__optional__|integer(int32) +|**sent** + +__optional__|integer(int32) +|**version** + +__optional__|string +|**zxId** + +__optional__|string +|=== + + + diff --git a/docs/index.html b/docs/index.html index ea6b3b6..6f10fe9 100644 --- a/docs/index.html +++ b/docs/index.html @@ -1,4279 +1,5440 @@
-
-

1. Overview

-
-
-

Kafka REST API SwaggerUI

-
-
-

1.1. Version information

-
-

Version : 0.1.0

-
-
-
-

1.2. Contact information

-
-

Contact : gnuhpc
-Contact Email : gnuhpc@gmail.com

-
-
-
-

1.3. URI scheme

-
-

Host : localhost:8080
-BasePath : /

-
-
-
-

1.4. Tags

-
-
    -
  • -

    kafka-controller : Kafka Controller

    -
  • -
  • -

    zookeeper-controller : Zookeeper Controller

    -
  • -
-
-
-
-
-
-

2. Chapter of manual content 1

-
-
-

This is some dummy text

-
-
-

2.1. Sub chapter

-
-

Dummy text of sub chapter

-
-
-
-
-
-

3. Chapter of manual content 2

-
-
-

This is some dummy text

-
-
-
-
-

4. Resources

-
-
-

4.1. Kafka-controller

-
-

Kafka Controller

-
-
-

4.1.1. List brokers in this cluster

-
-
-
GET /kafka/brokers
-
-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< BrokerInfo > array

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.2. Get the message from the offset of the partition in the topic, decoder is not supported yet

-
-
-
GET /kafka/consumer/{topic}/{partition}/{offset}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

offset
-required

offset

integer(int64)

Path

partition
-required

partition

integer(int32)

Path

topic
-required

topic

string

Query

decoder
-optional

decoder

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

string

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.3. Delete old Consumer Group

-
-
-
DELETE /kafka/consumergroup/{consumergroup}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

consumergroup
-required

consumergroup

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

GeneralResponse

204

No Content

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.4. getLastCommitTimestamp

-
-
-
GET /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/lastcommittime
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

consumergroup
-required

consumergroup

string

Path

topic
-required

topic

string

Path

type
-required

type

enum (NEW, OLD)

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, < string, integer(int64) > map > map

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.5. Reset consumer group offset, earliest/latest can be used

-
-
-
PUT /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/{partition}/{offset}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

consumergroup
-required

consumergroup

string

Path

offset
-required

offset

string

Path

partition
-required

partition

integer(int32)

Path

topic
-required

topic

string

Path

type
-required

type

enum (NEW, OLD)

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

GeneralResponse

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.6. List all consumer groups from zk and kafka

-
-
-
GET /kafka/consumergroups
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Query

topic
-optional

topic

string

Query

type
-optional

type

enum (NEW, OLD)

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, < string > array > map

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.7. Describe consumer groups, showing lag and offset, may be slow if multi topic are listened

-
-
-
GET /kafka/consumergroups/{consumerGroup}/{type}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

consumerGroup
-required

consumerGroup

string

Path

type
-required

type

enum (NEW, OLD)

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, < ConsumerGroupDesc > array > map

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.8. Get the topics involved of the specify consumer group

-
-
-
GET /kafka/consumergroups/{consumerGroup}/{type}/topic
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

consumerGroup
-required

consumerGroup

string

Path

type
-required

type

enum (NEW, OLD)

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string > array

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.9. Describe consumer groups by topic, showing lag and offset

-
-
-
GET /kafka/consumergroups/{consumerGroup}/{type}/topic/{topic}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

consumerGroup
-required

consumerGroup

string

Path

topic
-required

topic

string

Path

type
-required

type

enum (NEW, OLD)

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< ConsumerGroupDesc > array

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.10. Add a partition to the topic

-
-
-
POST /kafka/partitions/add
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Body

addPartition
-required

addPartition

AddPartition

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

TopicMeta

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.11. Check the partition reassignment process

-
-
-
PUT /kafka/partitions/reassign/check
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Body

reassignStr
-required

reassignStr

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

-1

Reassignment Failed

No Content

0

Reassignment In Progress

No Content

1

Reassignment Completed

No Content

200

OK

< string, integer(int32) > map

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.12. Execute the partition reassignment

-
-
-
PUT /kafka/partitions/reassign/execute
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Body

reassignStr
-required

reassignStr

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, integer(int32) > map

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.13. Generate plan for the partition reassignment

-
-
-
POST /kafka/partitions/reassign/generate
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Body

reassignWrapper
-required

reassignWrapper

ReassignWrapper

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string > array

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.14. List topics

-
-
-
GET /kafka/topics
-
-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string > array

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.15. Create a topic

-
-
-
POST /kafka/topics/create
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Query

reassignStr
-optional

reassignStr

string

Body

topic
-required

topic

TopicDetail

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

201

Created

TopicMeta

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.16. Describe a topic by fetching the metadata and config

-
-
-
GET /kafka/topics/{topic}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

TopicMeta

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.17. Delete a topic (you should enable topic deletion

-
-
-
DELETE /kafka/topics/{topic}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

GeneralResponse

204

No Content

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.18. Create topic configs

-
-
-
POST /kafka/topics/{topic}/conf
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

Body

prop
-required

prop

< string, object > map

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, object > map

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.19. Get topic configs

-
-
-
GET /kafka/topics/{topic}/conf
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, object > map

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.20. Update topic configs

-
-
-
PUT /kafka/topics/{topic}/conf
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

Body

prop
-required

prop

< string, object > map

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, object > map

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.21. Delete topic configs

-
-
-
DELETE /kafka/topics/{topic}/conf
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

Body

delProps
-required

delProps

< string > array

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, object > map

204

No Content

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.22. Get topic config by key

-
-
-
GET /kafka/topics/{topic}/conf/{key}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

key
-required

key

string

Path

topic
-required

topic

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, object > map

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.23. Delete a topic config by key

-
-
-
DELETE /kafka/topics/{topic}/conf/{key}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

key
-required

key

string

Path

topic
-required

topic

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

boolean

204

No Content

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.24. Create a topic config by key

-
-
-
POST /kafka/topics/{topic}/conf/{key}={value}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

key
-required

key

string

Path

topic
-required

topic

string

Path

value
-required

value

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, object > map

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.25. Update a topic config by key

-
-
-
PUT /kafka/topics/{topic}/conf/{key}={value}
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

key
-required

key

string

Path

topic
-required

topic

string

Path

value
-required

value

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< string, object > map

201

Created

No Content

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.26. Tell if a topic exists

-
-
-
GET /kafka/topics/{topic}/exist
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

boolean

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.27. Write a message to the topic, for testing purpose

-
-
-
POST /kafka/topics/{topic}/write
-
-
-
-
Parameters
- ------ - - - - - - - - - - - - - - - - - - - - - - -
TypeNameDescriptionSchema

Path

topic
-required

topic

string

Body

message
-required

message

string

-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

201

Created

GeneralResponse

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    text/plain

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-

4.1.28. List topics Brief

-
-
-
GET /kafka/topicsbrief
-
-
-
-
Responses
- ----- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
HTTP CodeDescriptionSchema

200

OK

< TopicBrief > array

401

Unauthorized

No Content

403

Forbidden

No Content

404

Not Found

No Content

-
-
-
Consumes
-
-
    -
  • -

    application/json

    -
  • -
-
-
-
-
Produces
-
-
    -
  • -

    /

    -
  • -
-
-
-
-
-
-

4.2. Zookeeper-controller

-
-

Zookeeper Controller

-
-
-

4.2.1. Get the connection state of zookeeper

GET /zk/connstate

Responses (HTTP Code | Description | Schema)
  200 | OK | string
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.2. Get the environment information of zookeeper

GET /zk/env

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, ZkServerEnvironment > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.3. List a zookeeper path

GET /zk/ls/{path}

Parameters (Type | Name | Schema)
  Path | path (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.4. Get the service state of zookeeper

GET /zk/stat

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, ZkServerStat > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

5. Definitions

5.1. AddPartition (Name | Schema; all fields optional)
  numPartitionsAdded | integer(int32)
  replicaAssignment | string
  topic | string

5.2. BrokerInfo (Name | Schema; all fields optional)
  endPoints | < string > array
  host | string
  id | integer(int32)
  jmxPort | integer(int32)
  port | integer(int32)
  rack | string
  securityProtocol | object
  startTime | string(date-time)
  version | integer(int32)

5.3. ConsumerGroupDesc (Name | Schema; all fields optional)
  consumerId | string
  currentOffset | integer(int64)
  groupName | string
  host | string
  lag | integer(int64)
  logEndOffset | integer(int64)
  partitionId | integer(int32)
  state | enum (RUNNING, PENDING)
  topic | string
  type | enum (NEW, OLD)

5.4. GeneralResponse (Name | Schema; all fields optional)
  msg | string
  state | enum (success, failure)

5.5. HostAndPort (Name | Schema; all fields optional)
  hostText | string
  port | integer(int32)

5.6. Map«int,long»

Type : < string, integer(int64) > map

5.7. ReassignWrapper (Name | Schema; all fields optional)
  brokers | < integer(int32) > array
  topics | < string > array

5.8. TopicAndPartition

Type : object

5.9. TopicBrief (Name | Schema; all fields optional)
  isrRate | number(double)
  numPartition | integer(int32)
  topic | string

5.10. TopicDetail (Name | Schema; all fields optional)
  factor | integer(int32)
  name | string
  partitions | integer(int32)
  prop | < string, object > map

5.11. TopicMeta (Name | Schema; all fields optional)
  partitionCount | integer(int32)
  replicationFactor | integer(int32)
  topicCustomConfigs | < string, object > map
  topicName | string
  topicPartitionInfos | < TopicPartitionInfo > array

5.12. TopicPartitionInfo (Name | Schema; all fields optional)
  endOffset | integer(int64)
  in_sync | boolean
  isr | < string > array
  leader | string
  messageAvailable | integer(int64)
  partitionId | integer(int32)
  replicas | < string > array
  startOffset | integer(int64)

5.13. ZkServerClient (Name | Schema; all fields optional)
  host | string
  ops | integer(int32)
  port | integer(int32)
  queued | integer(int32)
  received | integer(int32)
  sent | integer(int32)

5.14. ZkServerEnvironment (Name | Schema; all fields optional)
  attributes | < string, string > map

5.15. ZkServerStat (Name | Schema; all fields optional)
  avgLatency | integer(int32)
  buildDate | string
  clients | < ZkServerClient > array
  connections | integer(int32)
  maxLatency | integer(int32)
  minLatency | integer(int32)
  mode | enum (Leader, Follower, Observer)
  nodes | integer(int32)
  outstanding | integer(int32)
  received | integer(int32)
  sent | integer(int32)
  version | string
  zxId | string
Kafka REST API SwaggerUI

1. Overview

Kafka REST API SwaggerUI

1.1. Version information

Version : 0.1.0

1.2. Contact information

Contact : gnuhpc
Contact Email : gnuhpc@gmail.com

1.3. URI scheme

Host : localhost:8080
BasePath : /

1.4. Tags

  • collector-controller : Rest API for Collecting JMX Metric Data
  • kafka-controller : Kafka Controller
  • user-controller : Security User Management Controller.
  • zookeeper-controller : Zookeeper Controller

2. Chapter of manual content 1

This is some dummy text

2.1. Sub chapter

Dummy text of sub chapter

3. Chapter of manual content 2

This is some dummy text

4. Resources

4.1. Collector-controller

Rest API for Collecting JMX Metric Data

4.1.1. Fetch all JMX metric data

GET /jmx/v1

Parameters (Type | Name | Description | Schema)
  Query | jmxurl (optional) | a comma-separated list of {IP:Port}, or 'default' | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < JMXMetricDataV1 > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*
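
A minimal Java 8 sketch of fetching all metrics, assuming the documented host localhost:8080 and a hypothetical broker JMX endpoint at 127.0.0.1:9999; passing jmxurl=default instead would use the server's configured address.

    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.Scanner;

    public class FetchAllJmxMetrics {
        public static void main(String[] args) throws Exception {
            // 127.0.0.1:9999 is a hypothetical broker JMX address.
            URL url = new URL("http://localhost:8080/jmx/v1?jmxurl=127.0.0.1:9999");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            try (Scanner sc = new Scanner(conn.getInputStream(), "UTF-8")) {
                sc.useDelimiter("\\A");
                // Response: JSON array of JMXMetricDataV1 (host, timestamp, mbeanInfo, ...).
                System.out.println(sc.hasNext() ? sc.next() : "");
            }
        }
    }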

4.1.2. Fetch JMX metric data with a query filter. You can get the query filter template through the API /jmx/v2/filters.

POST /jmx/v2

Parameters (Type | Name | Description | Schema)
  Query | jmxurl (optional) | a comma-separated list of {IP:Port}, or 'default' | string
  Body | jmxQuery (required) | jmxQuery | JMXQuery

Responses (HTTP Code | Description | Schema)
  200 | OK | < JMXMetricData > array
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*
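
The jmxQuery body is a JMXQuery (see definition 5.12): a filters array of JMXConfiguration objects whose include/exclude members are JMXFilters. A minimal Java 8 sketch, assuming localhost:8080; the single include filter selecting the kafka.server domain is an illustrative guess at a valid filter, not taken from the project's own samples.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.Scanner;

    public class FetchFilteredJmxMetrics {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:8080/jmx/v2?jmxurl=default");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("Content-Type", "application/json");
            conn.setDoOutput(true);
            // JMXQuery body: one JMXConfiguration whose include filter selects kafka.server.
            String jmxQuery = "{\"filters\":[{\"include\":{\"domain\":\"kafka.server\"}}]}";
            try (OutputStream out = conn.getOutputStream()) {
                out.write(jmxQuery.getBytes("UTF-8"));
            }
            try (Scanner sc = new Scanner(conn.getInputStream(), "UTF-8")) {
                sc.useDelimiter("\\A");
                System.out.println(sc.hasNext() ? sc.next() : ""); // JSON array of JMXMetricData
            }
        }
    }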

4.1.3. List the query filter templates by filterKey. If filterKey is empty, all templates are returned.

GET /jmx/v2/filters

Parameters (Type | Name | Schema)
  Query | filterKey (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2. Kafka-controller

Kafka Controller

4.2.1. List brokers in this cluster

GET /kafka/brokers

Responses (HTTP Code | Description | Schema)
  200 | OK | < BrokerInfo > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.2. Get the message at the given offset of the partition in the topic (decoder is not supported yet)

GET /kafka/consumer/{topic}/{partition}/{offset}

Parameters (Type | Name | Schema)
  Path | offset (required) | integer(int64)
  Path | partition (required) | integer(int32)
  Path | topic (required) | string
  Query | decoder (optional) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | string
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.3. Delete an old consumer group

DELETE /kafka/consumergroup/{consumergroup}

Parameters (Type | Name | Schema)
  Path | consumergroup (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | GeneralResponse
  204 | No Content | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content

Consumes: application/json
Produces: */*

4.2.4. getLastCommitTimestamp

GET /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/lastcommittime

Parameters (Type | Name | Schema)
  Path | consumergroup (required) | string
  Path | topic (required) | string
  Path | type (required) | enum (NEW, OLD)

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, < string, integer(int64) > map > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.5. Reset consumer group offset (earliest/latest can be used)

PUT /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/{partition}/{offset}

Parameters (Type | Name | Schema)
  Path | consumergroup (required) | string
  Path | offset (required) | string
  Path | partition (required) | integer(int32)
  Path | topic (required) | string
  Path | type (required) | enum (NEW, OLD)

Responses (HTTP Code | Description | Schema)
  200 | OK | GeneralResponse
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*
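
Since offset is typed as a string here, it accepts the literal values earliest and latest as well as a numeric offset. A minimal Java 8 sketch, assuming localhost:8080 and hypothetical group/topic names:

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ResetConsumerGroupOffset {
        public static void main(String[] args) throws Exception {
            // Hypothetical group "mygroup" on topic "test", partition 0, rewound to "earliest".
            // The offset segment also accepts a literal number or "latest".
            URL url = new URL(
                "http://localhost:8080/kafka/consumergroup/mygroup/NEW/topic/test/0/earliest");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            System.out.println("HTTP " + conn.getResponseCode()); // 200 + GeneralResponse on success
        }
    }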

4.2.6. List all consumer groups from ZooKeeper and Kafka

GET /kafka/consumergroups

Parameters (Type | Name | Schema)
  Query | topic (optional) | string
  Query | type (optional) | enum (NEW, OLD)

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, < string > array > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.7. Describe consumer groups, showing lag and offset (may be slow if multiple topics are consumed)

GET /kafka/consumergroups/{consumerGroup}/{type}

Parameters (Type | Name | Schema)
  Path | consumerGroup (required) | string
  Path | type (required) | enum (NEW, OLD)

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, < ConsumerGroupDesc > array > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.8. Get the topics involved in the specified consumer group

GET /kafka/consumergroups/{consumerGroup}/{type}/topic

Parameters (Type | Name | Schema)
  Path | consumerGroup (required) | string
  Path | type (required) | enum (NEW, OLD)

Responses (HTTP Code | Description | Schema)
  200 | OK | < string > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.9. Describe consumer groups by topic, showing lag and offset

GET /kafka/consumergroups/{consumerGroup}/{type}/topic/{topic}

Parameters (Type | Name | Schema)
  Path | consumerGroup (required) | string
  Path | topic (required) | string
  Path | type (required) | enum (NEW, OLD)

Responses (HTTP Code | Description | Schema)
  200 | OK | < ConsumerGroupDesc > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.10. Check the cluster health

GET /kafka/health

Responses (HTTP Code | Description | Schema)
  200 | OK | HealthCheckResult
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.11. Add a partition to the topic

POST /kafka/partitions/add

Parameters (Type | Name | Schema)
  Body | addPartition (required) | AddPartition

Responses (HTTP Code | Description | Schema)
  200 | OK | TopicMeta
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.12. Check the partition reassignment process

PUT /kafka/partitions/reassign/check

Parameters (Type | Name | Schema)
  Body | reassignStr (required) | string

Responses (HTTP Code | Description | Schema)
  -1 | Reassignment Failed | No Content
  0 | Reassignment In Progress | No Content
  1 | Reassignment Completed | No Content
  200 | OK | < string, integer(int32) > map
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*
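
The useful part of the 200 body is a map from partition to one of the three status values listed above (-1 failed, 0 in progress, 1 completed). A minimal Java 8 sketch, assuming localhost:8080; the reassignStr payload is modeled on the standard kafka-reassign-partitions JSON layout, which is an assumption, since this document does not spell out the expected format.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.util.Scanner;

    public class CheckReassignment {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:8080/kafka/partitions/reassign/check");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("PUT");
            conn.setRequestProperty("Content-Type", "application/json");
            conn.setDoOutput(true);
            // reassignStr: the plan produced by /kafka/partitions/reassign/generate (assumed format).
            String reassignStr = "{\"version\":1,\"partitions\":"
                + "[{\"topic\":\"test\",\"partition\":0,\"replicas\":[1,2]}]}";
            try (OutputStream out = conn.getOutputStream()) {
                out.write(reassignStr.getBytes("UTF-8"));
            }
            // 200 body maps each partition to -1 (failed), 0 (in progress) or 1 (completed).
            try (Scanner sc = new Scanner(conn.getInputStream(), "UTF-8")) {
                sc.useDelimiter("\\A");
                System.out.println(sc.hasNext() ? sc.next() : "");
            }
        }
    }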

4.2.13. Execute the partition reassignment

PUT /kafka/partitions/reassign/execute

Parameters (Type | Name | Schema)
  Body | reassignStr (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, integer(int32) > map
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.14. Generate plan for the partition reassignment

POST /kafka/partitions/reassign/generate

Parameters (Type | Name | Schema)
  Body | reassignWrapper (required) | ReassignWrapper

Responses (HTTP Code | Description | Schema)
  200 | OK | < string > array
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.15. List topics

GET /kafka/topics

Responses (HTTP Code | Description | Schema)
  200 | OK | < string > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.16. Create a topic

POST /kafka/topics/create

Parameters (Type | Name | Schema)
  Query | reassignStr (optional) | string
  Body | topic (required) | TopicDetail

Responses (HTTP Code | Description | Schema)
  201 | Created | TopicMeta
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*
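
The request body is a TopicDetail (definition 5.18): name, partitions, factor (replication factor), and an optional prop map of topic configs. A minimal Java 8 sketch, assuming localhost:8080 and made-up values:

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class CreateTopic {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:8080/kafka/topics/create");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("Content-Type", "application/json");
            conn.setDoOutput(true);
            // TopicDetail fields from definition 5.18; the values here are hypothetical.
            String topicDetail = "{\"name\":\"test\",\"partitions\":3,\"factor\":2,"
                + "\"prop\":{\"retention.ms\":86400000}}";
            try (OutputStream out = conn.getOutputStream()) {
                out.write(topicDetail.getBytes("UTF-8"));
            }
            System.out.println("HTTP " + conn.getResponseCode()); // 201 + TopicMeta on success
        }
    }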

4.2.17. Describe a topic by fetching the metadata and config

GET /kafka/topics/{topic}

Parameters (Type | Name | Schema)
  Path | topic (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | TopicMeta
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.18. Delete a topic (you should enable topic deletion)

DELETE /kafka/topics/{topic}

Parameters (Type | Name | Schema)
  Path | topic (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | GeneralResponse
  204 | No Content | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content

Consumes: application/json
Produces: */*

4.2.19. Create topic configs

POST /kafka/topics/{topic}/conf

Parameters (Type | Name | Schema)
  Path | topic (required) | string
  Body | prop (required) | < string, object > map

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.20. Get topic configs

GET /kafka/topics/{topic}/conf

Parameters (Type | Name | Schema)
  Path | topic (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.21. Update topic configs

PUT /kafka/topics/{topic}/conf

Parameters (Type | Name | Schema)
  Path | topic (required) | string
  Body | prop (required) | < string, object > map

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.22. Delete topic configs

DELETE /kafka/topics/{topic}/conf

Parameters (Type | Name | Schema)
  Path | topic (required) | string
  Body | delProps (required) | < string > array

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  204 | No Content | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content

Consumes: application/json
Produces: */*

4.2.23. Get topic config by key

GET /kafka/topics/{topic}/conf/{key}

Parameters (Type | Name | Schema)
  Path | key (required) | string
  Path | topic (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.24. Delete a topic config by key

DELETE /kafka/topics/{topic}/conf/{key}

Parameters (Type | Name | Schema)
  Path | key (required) | string
  Path | topic (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | boolean
  204 | No Content | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content

Consumes: application/json
Produces: */*

4.2.25. Create a topic config by key

POST /kafka/topics/{topic}/conf/{key}={value}

Parameters (Type | Name | Schema)
  Path | key (required) | string
  Path | topic (required) | string
  Path | value (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.26. Update a topic config by key

PUT /kafka/topics/{topic}/conf/{key}={value}

Parameters (Type | Name | Schema)
  Path | key (required) | string
  Path | topic (required) | string
  Path | value (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, object > map
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.27. Tell if a topic exists

GET /kafka/topics/{topic}/exist

Parameters (Type | Name | Schema)
  Path | topic (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | boolean
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.2.28. Write a message to the topic, for testing purposes

POST /kafka/topics/{topic}/write

Parameters (Type | Name | Schema)
  Path | topic (required) | string
  Body | message (required) | string

Responses (HTTP Code | Description | Schema)
  201 | Created | GeneralResponse
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: text/plain
Produces: */*

4.2.29. List topics in brief

GET /kafka/topicsbrief

Responses (HTTP Code | Description | Schema)
  200 | OK | < TopicBrief > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.3. User-controller

Security User Management Controller.

4.3.1. Add user.

POST /users

Parameters (Type | Name | Schema)
  Body | user (required) | User

Responses (HTTP Code | Description | Schema)
  200 | OK | GeneralResponse
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*
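
The request body is a User (definition 5.21) with username, password, and role. A minimal Java 8 sketch, assuming localhost:8080; the field values and the role name admin are made up for illustration.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class AddUser {
        public static void main(String[] args) throws Exception {
            URL url = new URL("http://localhost:8080/users");
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            conn.setRequestMethod("POST");
            conn.setRequestProperty("Content-Type", "application/json");
            conn.setDoOutput(true);
            // User body from definition 5.21; values are hypothetical.
            String user = "{\"username\":\"alice\",\"password\":\"changeme\",\"role\":\"admin\"}";
            try (OutputStream out = conn.getOutputStream()) {
                out.write(user.getBytes("UTF-8"));
            }
            System.out.println("HTTP " + conn.getResponseCode()); // 200 + GeneralResponse on success
        }
    }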

4.3.2. Get user list.

GET /users

Responses (HTTP Code | Description | Schema)
  200 | OK | < string > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.3.3. Modify user information.

PUT /users

Parameters (Type | Name | Schema)
  Body | user (required) | User

Responses (HTTP Code | Description | Schema)
  200 | OK | GeneralResponse
  201 | Created | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.3.4. Delete user.

DELETE /users/{username}

Parameters (Type | Name | Schema)
  Path | username (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | GeneralResponse
  204 | No Content | No Content
  401 | Unauthorized | No Content
  403 | Forbidden | No Content

Consumes: application/json
Produces: */*

4.4. Zookeeper-controller

Zookeeper Controller

4.4.1. Get the connection state of zookeeper

GET /zk/connstate

Responses (HTTP Code | Description | Schema)
  200 | OK | string
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.4.2. Get the environment information of zookeeper

GET /zk/env

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, ZkServerEnvironment > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.4.3. Get data of a zookeeper path

GET /zk/get/path

Parameters (Type | Name | Schema)
  Query | path (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, string > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*
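
Unlike the old /zk/ls/{path} endpoint, the znode path is passed as a query parameter here, so it must be URL-encoded. A minimal Java 8 sketch, assuming localhost:8080 and the standard /brokers/ids znode:

    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.net.URLEncoder;
    import java.util.Scanner;

    public class GetZkPathData {
        public static void main(String[] args) throws Exception {
            // The znode path goes in a query parameter, so slashes must be URL-encoded.
            String path = URLEncoder.encode("/brokers/ids", "UTF-8");
            URL url = new URL("http://localhost:8080/zk/get/path?path=" + path);
            HttpURLConnection conn = (HttpURLConnection) url.openConnection();
            try (Scanner sc = new Scanner(conn.getInputStream(), "UTF-8")) {
                sc.useDelimiter("\\A");
                System.out.println(sc.hasNext() ? sc.next() : ""); // < string, string > map as JSON
            }
        }
    }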

4.4.4. List a zookeeper path

GET /zk/ls/path

Parameters (Type | Name | Schema)
  Query | path (required) | string

Responses (HTTP Code | Description | Schema)
  200 | OK | < string > array
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

4.4.5. Get the service state of zookeeper

GET /zk/stat

Responses (HTTP Code | Description | Schema)
  200 | OK | < string, ZkServerStat > map
  401 | Unauthorized | No Content
  403 | Forbidden | No Content
  404 | Not Found | No Content

Consumes: application/json
Produces: */*

5. Definitions

5.1. AddPartition (Name | Schema; all fields optional)
  numPartitionsAdded | integer(int32)
  replicaAssignment | string
  topic | string

5.2. BrokerInfo (Name | Schema; all fields optional)
  endPoints | < string > array
  host | string
  id | integer(int32)
  jmxPort | integer(int32)
  port | integer(int32)
  rack | string
  securityProtocol | object
  startTime | string(date-time)
  version | integer(int32)

5.3. ConsumerGroupDesc (Name | Schema; all fields optional)
  consumerId | string
  currentOffset | integer(int64)
  groupName | string
  host | string
  lag | integer(int64)
  logEndOffset | integer(int64)
  partitionId | integer(int32)
  state | enum (RUNNING, PENDING)
  topic | string
  type | enum (NEW, OLD)

5.4. GeneralResponse (Name | Schema; all fields optional)
  msg | string
  state | enum (success, failure)

5.5. HashMap«string,object»

Type : < string, object > map

5.6. HealthCheckResult (Name | Schema; all fields optional)
  msg | string
  status | string
  timestamp | string (Example : "yyyy-MM-dd HH:mm:ss")

5.7. HostAndPort (Name | Schema; all fields optional)
  hostText | string
  port | integer(int32)

5.8. JMXConfiguration (Name | Schema; all fields optional)
  exclude | JMXFilter
  include | JMXFilter

5.9. JMXFilter (Name | Schema; all fields optional)
  attribute | object
  beanNames | < string > array
  beanRegexes | < Pattern > array
  domain | string
  domainRegex | Pattern
  emptyBeanName | boolean
  filter | < string, object > map

5.10. JMXMetricData (Name | Schema; all fields optional)
  collected | boolean
  host | string
  metrics | < HashMap«string,object» > array
  msg | string
  timestamp | string (Example : "yyyy-MM-dd HH:mm:ss")

5.11. JMXMetricDataV1 (Name | Schema; all fields optional)
  collected | boolean
  host | string
  mbeanInfo | object
  msg | string
  timestamp | string (Example : "yyyy-MM-dd HH:mm:ss")

5.12. JMXQuery (Name | Schema; all fields optional)
  filters | < JMXConfiguration > array

5.13. Map«int,long»

Type : < string, integer(int64) > map

5.14. Pattern (Name | Schema; all fields optional)
  cursor | integer(int32)

5.15. ReassignWrapper (Name | Schema; all fields optional)
  brokers | < integer(int32) > array
  topics | < string > array

5.16. TopicAndPartition

Type : object

5.17. TopicBrief (Name | Schema; all fields optional)
  isrRate | number(double)
  numPartition | integer(int32)
  topic | string

5.18. TopicDetail (Name | Schema; all fields optional)
  factor | integer(int32)
  name | string
  partitions | integer(int32)
  prop | < string, object > map

5.19. TopicMeta (Name | Schema; all fields optional)
  partitionCount | integer(int32)
  replicationFactor | integer(int32)
  topicCustomConfigs | < string, object > map
  topicName | string
  topicPartitionInfos | < TopicPartitionInfo > array

5.20. TopicPartitionInfo (Name | Schema; all fields optional)
  endOffset | integer(int64)
  in_sync | boolean
  isr | < string > array
  leader | string
  messageAvailable | integer(int64)
  partitionId | integer(int32)
  replicas | < string > array
  startOffset | integer(int64)

5.21. User (Name | Schema; all fields optional)
  password | string
  role | string
  username | string

5.22. ZkServerClient (Name | Schema; all fields optional)
  host | string
  ops | integer(int32)
  port | integer(int32)
  queued | integer(int32)
  received | integer(int32)
  sent | integer(int32)

5.23. ZkServerEnvironment (Name | Schema; all fields optional)
  attributes | < string, string > map

5.24. ZkServerStat (Name | Schema; all fields optional)
  avgLatency | integer(int32)
  buildDate | string
  clients | < ZkServerClient > array
  connections | integer(int32)
  maxLatency | integer(int32)
  minLatency | integer(int32)
  mode | enum (Leader, Follower, Observer, Standalone)
  nodes | integer(int32)
  outstanding | integer(int32)
  received | integer(int32)
  sent | integer(int32)
  version | string
  zxId | string
+ + \ No newline at end of file diff --git a/docs/index.pdf b/docs/index.pdf index f8eea3f..847550e 100644 Binary files a/docs/index.pdf and b/docs/index.pdf differ diff --git a/docs/overview.adoc b/docs/overview.adoc index 11862a1..15ca086 100644 --- a/docs/overview.adoc +++ b/docs/overview.adoc @@ -1,32 +1,34 @@ -= Kafka REST API SwaggerUI - - -[[_overview]] -== Overview -Kafka REST API SwaggerUI - - -=== Version information -[%hardbreaks] -__Version__ : 0.1.0 - - -=== Contact information -[%hardbreaks] -__Contact__ : gnuhpc -__Contact Email__ : gnuhpc@gmail.com - - -=== URI scheme -[%hardbreaks] -__Host__ : localhost:8080 -__BasePath__ : / - - -=== Tags - -* kafka-controller : Kafka Controller -* zookeeper-controller : Zookeeper Controller - - - += Kafka REST API SwaggerUI + + +[[_overview]] +== Overview +Kafka REST API SwaggerUI + + +=== Version information +[%hardbreaks] +__Version__ : 0.1.0 + + +=== Contact information +[%hardbreaks] +__Contact__ : gnuhpc +__Contact Email__ : gnuhpc@gmail.com + + +=== URI scheme +[%hardbreaks] +__Host__ : localhost:8080 +__BasePath__ : / + + +=== Tags + +* collector-controller : Rest API for Collecting JMX Metric Data +* kafka-controller : Kafka Controller +* user-controller : Security User Management Controller. +* zookeeper-controller : Zookeeper Controller + + + diff --git a/docs/paths.adoc b/docs/paths.adoc index 0e01371..be1b6cd 100644 --- a/docs/paths.adoc +++ b/docs/paths.adoc @@ -1,1265 +1,1611 @@ - -[[_paths]] -== Resources - -[[_kafka-controller_resource]] -=== Kafka-controller -Kafka Controller - - -[[_listbrokersusingget]] -==== List brokers in this cluster -.... -GET /kafka/brokers -.... - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< <<_brokerinfo,BrokerInfo>> > array -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_getmessageusingget]] -==== Get the message from the offset of the partition in the topic, decoder is not supported yet -.... -GET /kafka/consumer/{topic}/{partition}/{offset} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**offset** + -__required__|offset|integer(int64) -|**Path**|**partition** + -__required__|partition|integer(int32) -|**Path**|**topic** + -__required__|topic|string -|**Query**|**decoder** + -__optional__|decoder|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|string -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_deleteoldconsumergroupusingdelete]] -==== Delete old Consumer Group -.... -DELETE /kafka/consumergroup/{consumergroup} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**consumergroup** + -__required__|consumergroup|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|<<_generalresponse,GeneralResponse>> -|**204**|No Content|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_getlastcommittimestampusingget]] -==== getLastCommitTimestamp -.... 
-GET /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/lastcommittime -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**consumergroup** + -__required__|consumergroup|string -|**Path**|**topic** + -__required__|topic|string -|**Path**|**type** + -__required__|type|enum (NEW, OLD) -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, < string, integer(int64) > map > map -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_resetoffsetusingput]] -==== Reset consumer group offset, earliest/latest can be used -.... -PUT /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/{partition}/{offset} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**consumergroup** + -__required__|consumergroup|string -|**Path**|**offset** + -__required__|offset|string -|**Path**|**partition** + -__required__|partition|integer(int32) -|**Path**|**topic** + -__required__|topic|string -|**Path**|**type** + -__required__|type|enum (NEW, OLD) -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|<<_generalresponse,GeneralResponse>> -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_listallconsumergroupsusingget]] -==== List all consumer groups from zk and kafka -.... -GET /kafka/consumergroups -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Query**|**topic** + -__optional__|topic|string -|**Query**|**type** + -__optional__|type|enum (NEW, OLD) -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, < string > array > map -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_describecgusingget]] -==== Describe consumer groups, showing lag and offset, may be slow if multi topic are listened -.... -GET /kafka/consumergroups/{consumerGroup}/{type} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**consumerGroup** + -__required__|consumerGroup|string -|**Path**|**type** + -__required__|type|enum (NEW, OLD) -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, < <<_consumergroupdesc,ConsumerGroupDesc>> > array > map -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_listtopicbycgusingget]] -==== Get the topics involved of the specify consumer group -.... -GET /kafka/consumergroups/{consumerGroup}/{type}/topic -.... 
- - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**consumerGroup** + -__required__|consumerGroup|string -|**Path**|**type** + -__required__|type|enum (NEW, OLD) -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string > array -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_describecgbytopicusingget]] -==== Describe consumer groups by topic, showing lag and offset -.... -GET /kafka/consumergroups/{consumerGroup}/{type}/topic/{topic} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**consumerGroup** + -__required__|consumerGroup|string -|**Path**|**topic** + -__required__|topic|string -|**Path**|**type** + -__required__|type|enum (NEW, OLD) -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< <<_consumergroupdesc,ConsumerGroupDesc>> > array -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_addpartitionusingpost]] -==== Add a partition to the topic -.... -POST /kafka/partitions/add -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Body**|**addPartition** + -__required__|addPartition|<<_addpartition,AddPartition>> -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|<<_topicmeta,TopicMeta>> -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_checkreassignpartitionsusingput]] -==== Check the partition reassignment process -.... -PUT /kafka/partitions/reassign/check -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Body**|**reassignStr** + -__required__|reassignStr|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**-1**|Reassignment Failed|No Content -|**0**|Reassignment In Progress|No Content -|**1**|Reassignment Completed|No Content -|**200**|OK|< string, integer(int32) > map -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_executereassignpartitionsusingput]] -==== Execute the partition reassignment -.... -PUT /kafka/partitions/reassign/execute -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Body**|**reassignStr** + -__required__|reassignStr|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, integer(int32) > map -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_generatereassignpartitionsusingpost]] -==== Generate plan for the partition reassignment -.... 
-POST /kafka/partitions/reassign/generate -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Body**|**reassignWrapper** + -__required__|reassignWrapper|<<_reassignwrapper,ReassignWrapper>> -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string > array -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_listtopicsusingget]] -==== List topics -.... -GET /kafka/topics -.... - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string > array -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_createtopicusingpost]] -==== Create a topic -.... -POST /kafka/topics/create -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Query**|**reassignStr** + -__optional__|reassignStr|string -|**Body**|**topic** + -__required__|topic|<<_topicdetail,TopicDetail>> -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**201**|Created|<<_topicmeta,TopicMeta>> -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_describetopicusingget]] -==== Describe a topic by fetching the metadata and config -.... -GET /kafka/topics/{topic} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|<<_topicmeta,TopicMeta>> -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_deletetopicusingdelete]] -==== Delete a topic (you should enable topic deletion -.... -DELETE /kafka/topics/{topic} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|<<_generalresponse,GeneralResponse>> -|**204**|No Content|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_createtopicconfigusingpost]] -==== Create topic configs -.... -POST /kafka/topics/{topic}/conf -.... 
- - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|**Body**|**prop** + -__required__|prop|< string, object > map -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, object > map -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_gettopicconfigusingget]] -==== Get topic configs -.... -GET /kafka/topics/{topic}/conf -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, object > map -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_updatetopicconfigusingput]] -==== Update topic configs -.... -PUT /kafka/topics/{topic}/conf -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|**Body**|**prop** + -__required__|prop|< string, object > map -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, object > map -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_deletetopicconfigusingdelete]] -==== Delete topic configs -.... -DELETE /kafka/topics/{topic}/conf -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|**Body**|**delProps** + -__required__|delProps|< string > array -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, object > map -|**204**|No Content|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_gettopicconfigbykeyusingget]] -==== Get topic config by key -.... -GET /kafka/topics/{topic}/conf/{key} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**key** + -__required__|key|string -|**Path**|**topic** + -__required__|topic|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, object > map -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_deletetopicconfigbykeyusingdelete]] -==== Delete a topic config by key -.... -DELETE /kafka/topics/{topic}/conf/{key} -.... 
- - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**key** + -__required__|key|string -|**Path**|**topic** + -__required__|topic|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|boolean -|**204**|No Content|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_createtopicconfigbykeyusingpost]] -==== Create a topic config by key -.... -POST /kafka/topics/{topic}/conf/{key}={value} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**key** + -__required__|key|string -|**Path**|**topic** + -__required__|topic|string -|**Path**|**value** + -__required__|value|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, object > map -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_updatetopicconfigbykeyusingput]] -==== Update a topic config by key -.... -PUT /kafka/topics/{topic}/conf/{key}={value} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**key** + -__required__|key|string -|**Path**|**topic** + -__required__|topic|string -|**Path**|**value** + -__required__|value|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, object > map -|**201**|Created|No Content -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_existtopicusingget]] -==== Tell if a topic exists -.... -GET /kafka/topics/{topic}/exist -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|boolean -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_writemessageusingpost]] -==== Write a message to the topic, for testing purpose -.... -POST /kafka/topics/{topic}/write -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**topic** + -__required__|topic|string -|**Body**|**message** + -__required__|message|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**201**|Created|<<_generalresponse,GeneralResponse>> -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `text/plain` - - -===== Produces - -* `*/*` - - -[[_listtopicbriefusingget]] -==== List topics Brief -.... -GET /kafka/topicsbrief -.... 
- - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< <<_topicbrief,TopicBrief>> > array -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_zookeeper-controller_resource]] -=== Zookeeper-controller -Zookeeper Controller - - -[[_zkconnstateusingget]] -==== Get the connection state of zookeeper -.... -GET /zk/connstate -.... - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|string -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_getenvusingget]] -==== Get the environment information of zookeeper -.... -GET /zk/env -.... - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, <<_zkserverenvironment,ZkServerEnvironment>> > map -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_lsusingget]] -==== List a zookeeper path -.... -GET /zk/ls/{path} -.... - - -===== Parameters - -[options="header", cols=".^2,.^3,.^9,.^4"] -|=== -|Type|Name|Description|Schema -|**Path**|**path** + -__required__|path|string -|=== - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string > array -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - -[[_getstatusingget]] -==== Get the service state of zookeeper -.... -GET /zk/stat -.... - - -===== Responses - -[options="header", cols=".^2,.^14,.^4"] -|=== -|HTTP Code|Description|Schema -|**200**|OK|< string, <<_zkserverstat,ZkServerStat>> > map -|**401**|Unauthorized|No Content -|**403**|Forbidden|No Content -|**404**|Not Found|No Content -|=== - - -===== Consumes - -* `application/json` - - -===== Produces - -* `*/*` - - - + +[[_paths]] +== Resources + +[[_collector-controller_resource]] +=== Collector-controller +Rest API for Collecting JMX Metric Data + + +[[_collectjmxmetricusingget]] +==== Fetch all JMX metric data +.... +GET /jmx/v1 +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Query**|**jmxurl** + +__optional__|Parameter jmxurl should be a comma-separated list of {IP:Port} or set to 'default'|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< <<_jmxmetricdatav1,JMXMetricDataV1>> > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_collectjmxmetricusingpost]] +==== Fetch JMX metric data with query filter. You can get the query filter template through the API /jmx/v2/filters. +.... +POST /jmx/v2 +.... 
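+
+As a usage illustration (not part of the generated reference), the sketch below calls this endpoint with Python `requests`; parameters and schemas are detailed just below. The host `localhost:8080` comes from the swagger definition, while the `admin` basic-auth credentials (introduced by `security/security.yml` in this change) and the shape of the `jmxQuery` body are assumptions:
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"                     # host from the swagger definition
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+# Assumed JMXQuery body referencing one of the shipped filter templates
+# (e.g. the Kafka broker filter); see GET /jmx/v2/filters for the real keys.
+resp = requests.post(
+    f"{BASE}/jmx/v2",
+    params={"jmxurl": "default"},
+    json={"filterKey": "KafkaBrokerFilter"},
+    auth=AUTH,
+)
+resp.raise_for_status()
+for metric_data in resp.json():  # array of JMXMetricData
+    print(metric_data)
+----
+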
+ + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Query**|**jmxurl** + +__optional__|Parameter jmxurl should be a comma-separated list of {IP:Port} or set to 'default'|string +|**Body**|**jmxQuery** + +__required__|jmxQuery|<<_jmxquery,JMXQuery>> +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< <<_jmxmetricdata,JMXMetricData>> > array +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_listjmxfiltertemplateusingget]] +==== List the query filter templates with the filterKey. If filterKey is set to empty, it will return all the templates. +.... +GET /jmx/v2/filters +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Query**|**filterKey** + +__required__|filterKey|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_kafka-controller_resource]] +=== Kafka-controller +Kafka Controller + + +[[_listbrokersusingget]] +==== List brokers in this cluster +.... +GET /kafka/brokers +.... + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< <<_brokerinfo,BrokerInfo>> > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_getmessageusingget]] +==== Get the message from the offset of the partition in the topic, decoder is not supported yet +.... +GET /kafka/consumer/{topic}/{partition}/{offset} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**offset** + +__required__|offset|integer(int64) +|**Path**|**partition** + +__required__|partition|integer(int32) +|**Path**|**topic** + +__required__|topic|string +|**Query**|**decoder** + +__optional__|decoder|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|string +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_deleteoldconsumergroupusingdelete]] +==== Delete old Consumer Group +.... +DELETE /kafka/consumergroup/{consumergroup} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**consumergroup** + +__required__|consumergroup|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_generalresponse,GeneralResponse>> +|**204**|No Content|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_getlastcommittimestampusingget]] +==== getLastCommitTimestamp +.... +GET /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/lastcommittime +.... 
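+
+A minimal sketch for this endpoint, reusing the assumed host and credentials from the earlier example; the group and topic names are placeholders:
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+# Last commit timestamps for group "my-group" on topic "my-topic",
+# using the new (Kafka-based) consumer group type.
+resp = requests.get(
+    f"{BASE}/kafka/consumergroup/my-group/NEW/topic/my-topic/lastcommittime",
+    auth=AUTH,
+)
+resp.raise_for_status()
+print(resp.json())  # nested map of int64 timestamps, per the response schema
+----
+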
+ + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**consumergroup** + +__required__|consumergroup|string +|**Path**|**topic** + +__required__|topic|string +|**Path**|**type** + +__required__|type|enum (NEW, OLD) +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, < string, integer(int64) > map > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_resetoffsetusingput]] +==== Reset consumer group offset, earliest/latest can be used +.... +PUT /kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/{partition}/{offset} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**consumergroup** + +__required__|consumergroup|string +|**Path**|**offset** + +__required__|offset|string +|**Path**|**partition** + +__required__|partition|integer(int32) +|**Path**|**topic** + +__required__|topic|string +|**Path**|**type** + +__required__|type|enum (NEW, OLD) +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_generalresponse,GeneralResponse>> +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_listallconsumergroupsusingget]] +==== List all consumer groups from zk and kafka +.... +GET /kafka/consumergroups +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Query**|**topic** + +__optional__|topic|string +|**Query**|**type** + +__optional__|type|enum (NEW, OLD) +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, < string > array > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_describecgusingget]] +==== Describe consumer groups, showing lag and offset; may be slow if multiple topics are subscribed +.... +GET /kafka/consumergroups/{consumerGroup}/{type} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**consumerGroup** + +__required__|consumerGroup|string +|**Path**|**type** + +__required__|type|enum (NEW, OLD) +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, < <<_consumergroupdesc,ConsumerGroupDesc>> > array > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_listtopicbycgusingget]] +==== Get the topics involved in the specified consumer group +.... +GET /kafka/consumergroups/{consumerGroup}/{type}/topic +....
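+
+A sketch of listing the topics for a given group (placeholder names, assumed credentials as above):
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+resp = requests.get(f"{BASE}/kafka/consumergroups/my-group/NEW/topic", auth=AUTH)
+resp.raise_for_status()
+print(resp.json())  # e.g. ["my-topic", ...]
+----
+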
+ + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**consumerGroup** + +__required__|consumerGroup|string +|**Path**|**type** + +__required__|type|enum (NEW, OLD) +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_describecgbytopicusingget]] +==== Describe consumer groups by topic, showing lag and offset +.... +GET /kafka/consumergroups/{consumerGroup}/{type}/topic/{topic} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**consumerGroup** + +__required__|consumerGroup|string +|**Path**|**topic** + +__required__|topic|string +|**Path**|**type** + +__required__|type|enum (NEW, OLD) +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< <<_consumergroupdesc,ConsumerGroupDesc>> > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_healthcheckusingget]] +==== Check the cluster health. +.... +GET /kafka/health +.... + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_healthcheckresult,HealthCheckResult>> +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_addpartitionusingpost]] +==== Add a partition to the topic +.... +POST /kafka/partitions/add +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Body**|**addPartition** + +__required__|addPartition|<<_addpartition,AddPartition>> +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_topicmeta,TopicMeta>> +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_checkreassignpartitionsusingput]] +==== Check the partition reassignment process +.... +PUT /kafka/partitions/reassign/check +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Body**|**reassignStr** + +__required__|reassignStr|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**-1**|Reassignment Failed|No Content +|**0**|Reassignment In Progress|No Content +|**1**|Reassignment Completed|No Content +|**200**|OK|< string, integer(int32) > map +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_executereassignpartitionsusingput]] +==== Execute the partition reassignment +.... +PUT /kafka/partitions/reassign/execute +.... 
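+
+A sketch of executing a reassignment. The `reassignStr` body is the plan string; the sketch assumes it is the JSON produced by `POST /kafka/partitions/reassign/generate`, and the plan below is illustrative only:
+
+[source,python]
+----
+import json
+
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+# Illustrative plan in the usual Kafka reassignment JSON layout.
+plan = {"version": 1,
+        "partitions": [{"topic": "my-topic", "partition": 0, "replicas": [1, 2]}]}
+
+resp = requests.put(
+    f"{BASE}/kafka/partitions/reassign/execute",
+    data=json.dumps(plan),
+    headers={"Content-Type": "application/json"},
+    auth=AUTH,
+)
+resp.raise_for_status()
+print(resp.json())  # map of partition -> int status, per the response schema
+----
+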
+ + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Body**|**reassignStr** + +__required__|reassignStr|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, integer(int32) > map +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_generatereassignpartitionsusingpost]] +==== Generate plan for the partition reassignment +.... +POST /kafka/partitions/reassign/generate +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Body**|**reassignWrapper** + +__required__|reassignWrapper|<<_reassignwrapper,ReassignWrapper>> +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string > array +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_listtopicsusingget]] +==== List topics +.... +GET /kafka/topics +.... + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_createtopicusingpost]] +==== Create a topic +.... +POST /kafka/topics/create +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Query**|**reassignStr** + +__optional__|reassignStr|string +|**Body**|**topic** + +__required__|topic|<<_topicdetail,TopicDetail>> +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**201**|Created|<<_topicmeta,TopicMeta>> +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_describetopicusingget]] +==== Describe a topic by fetching the metadata and config +.... +GET /kafka/topics/{topic} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_topicmeta,TopicMeta>> +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_deletetopicusingdelete]] +==== Delete a topic (you should enable topic deletion) +.... +DELETE /kafka/topics/{topic} +....
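+
+A sketch of deleting a topic (placeholder topic name; the broker needs `delete.topic.enable=true`, per the note above):
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+resp = requests.delete(f"{BASE}/kafka/topics/my-topic", auth=AUTH)
+resp.raise_for_status()
+print(resp.json())  # GeneralResponse: {"state": ..., "msg": ...}
+----
+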
+ + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_generalresponse,GeneralResponse>> +|**204**|No Content|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_createtopicconfigusingpost]] +==== Create topic configs +.... +POST /kafka/topics/{topic}/conf +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|**Body**|**prop** + +__required__|prop|< string, object > map +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_gettopicconfigusingget]] +==== Get topic configs +.... +GET /kafka/topics/{topic}/conf +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_updatetopicconfigusingput]] +==== Update topic configs +.... +PUT /kafka/topics/{topic}/conf +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|**Body**|**prop** + +__required__|prop|< string, object > map +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_deletetopicconfigusingdelete]] +==== Delete topic configs +.... +DELETE /kafka/topics/{topic}/conf +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|**Body**|**delProps** + +__required__|delProps|< string > array +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**204**|No Content|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_gettopicconfigbykeyusingget]] +==== Get topic config by key +.... +GET /kafka/topics/{topic}/conf/{key} +.... 
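+
+A sketch of reading one config entry (`retention.ms` is just an example key; credentials assumed as above):
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+resp = requests.get(f"{BASE}/kafka/topics/my-topic/conf/retention.ms", auth=AUTH)
+resp.raise_for_status()
+print(resp.json())  # map containing the requested key
+----
+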
+ + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**key** + +__required__|key|string +|**Path**|**topic** + +__required__|topic|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_deletetopicconfigbykeyusingdelete]] +==== Delete a topic config by key +.... +DELETE /kafka/topics/{topic}/conf/{key} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**key** + +__required__|key|string +|**Path**|**topic** + +__required__|topic|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|boolean +|**204**|No Content|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_createtopicconfigbykeyusingpost]] +==== Create a topic config by key +.... +POST /kafka/topics/{topic}/conf/{key}={value} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**key** + +__required__|key|string +|**Path**|**topic** + +__required__|topic|string +|**Path**|**value** + +__required__|value|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_updatetopicconfigbykeyusingput]] +==== Update a topic config by key +.... +PUT /kafka/topics/{topic}/conf/{key}={value} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**key** + +__required__|key|string +|**Path**|**topic** + +__required__|topic|string +|**Path**|**value** + +__required__|value|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, object > map +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_existtopicusingget]] +==== Tell if a topic exists +.... +GET /kafka/topics/{topic}/exist +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|boolean +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_writemessageusingpost]] +==== Write a message to the topic, for testing purposes +.... +POST /kafka/topics/{topic}/write +....
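+
+A sketch of writing a test message; note the `text/plain` body (the raw message, not JSON):
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+resp = requests.post(
+    f"{BASE}/kafka/topics/my-topic/write",
+    data="hello kafka",
+    headers={"Content-Type": "text/plain"},
+    auth=AUTH,
+)
+print(resp.status_code)  # expect 201 Created with a GeneralResponse body
+----
+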
+ + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**topic** + +__required__|topic|string +|**Body**|**message** + +__required__|message|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**201**|Created|<<_generalresponse,GeneralResponse>> +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `text/plain` + + +===== Produces + +* `*/*` + + +[[_listtopicbriefusingget]] +==== List topics Brief +.... +GET /kafka/topicsbrief +.... + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< <<_topicbrief,TopicBrief>> > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_user-controller_resource]] +=== User-controller +Security User Management Controller. + + +[[_adduserusingpost]] +==== Add user. +.... +POST /users +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Body**|**user** + +__required__|user|<<_user,User>> +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_generalresponse,GeneralResponse>> +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_listuserusingget]] +==== Get user list. +.... +GET /users +.... + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_modifyuserusingput]] +==== Modify user information. +.... +PUT /users +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Body**|**user** + +__required__|user|<<_user,User>> +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_generalresponse,GeneralResponse>> +|**201**|Created|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_deluserusingdelete]] +==== Delete user. +.... +DELETE /users/{username} +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Path**|**username** + +__required__|username|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|<<_generalresponse,GeneralResponse>> +|**204**|No Content|No Content +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_zookeeper-controller_resource]] +=== Zookeeper-controller +Zookeeper Controller + + +[[_zkconnstateusingget]] +==== Get the connection state of zookeeper +.... +GET /zk/connstate +.... 
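+
+A sketch of checking the zookeeper connection state (assumed host and credentials as in the earlier examples):
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+resp = requests.get(f"{BASE}/zk/connstate", auth=AUTH)
+resp.raise_for_status()
+print(resp.text)  # a plain state string, e.g. "CONNECTED" (illustrative value)
+----
+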
+ + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|string +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_getenvusingget]] +==== Get the environment information of zookeeper +.... +GET /zk/env +.... + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, <<_zkserverenvironment,ZkServerEnvironment>> > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_getusingget]] +==== Get data of a zookeeper path +.... +GET /zk/get/path +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Query**|**path** + +__required__|path|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, string > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_lsusingget]] +==== List a zookeeper path +.... +GET /zk/ls/path +.... + + +===== Parameters + +[options="header", cols=".^2,.^3,.^9,.^4"] +|=== +|Type|Name|Description|Schema +|**Query**|**path** + +__required__|path|string +|=== + + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string > array +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + +[[_getstatusingget]] +==== Get the service state of zookeeper +.... +GET /zk/stat +.... 
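+
+A sketch of fetching per-server zookeeper stats; the field names used below come from the ZkServerStat definition in the swagger dump:
+
+[source,python]
+----
+import requests
+from requests.auth import HTTPBasicAuth
+
+BASE = "http://localhost:8080"
+AUTH = HTTPBasicAuth("admin", "<admin-password>")  # assumed credentials
+
+resp = requests.get(f"{BASE}/zk/stat", auth=AUTH)
+resp.raise_for_status()
+for server, stat in resp.json().items():  # map of server -> ZkServerStat
+    print(server, stat.get("mode"), stat.get("avgLatency"))
+----
+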
+ + +===== Responses + +[options="header", cols=".^2,.^14,.^4"] +|=== +|HTTP Code|Description|Schema +|**200**|OK|< string, <<_zkserverstat,ZkServerStat>> > map +|**401**|Unauthorized|No Content +|**403**|Forbidden|No Content +|**404**|Not Found|No Content +|=== + + +===== Consumes + +* `application/json` + + +===== Produces + +* `*/*` + + + diff --git a/docs/security.adoc b/docs/security.adoc deleted file mode 100644 index 99a8091..0000000 --- a/docs/security.adoc +++ /dev/null @@ -1,2 +0,0 @@ - - diff --git a/pics/ShowApi.png b/pics/ShowApi.png index bf8b3f9..00ca838 100644 Binary files a/pics/ShowApi.png and b/pics/ShowApi.png differ diff --git a/pom.xml b/pom.xml index 58b759d..636648c 100644 --- a/pom.xml +++ b/pom.xml @@ -33,6 +33,8 @@ ${project.build.directory}/asciidoc/pdf ${swagger.output.dir}/swagger.json + 2.9.1 + 1.19 @@ -78,6 +80,11 @@ spring-boot-starter-web + + org.springframework.boot + spring-boot-starter-security + + org.springframework.boot spring-boot-starter @@ -91,7 +98,7 @@ org.springframework.boot spring-boot-starter-log4j - RELEASE + 1.3.8.RELEASE @@ -196,8 +203,24 @@ com.fasterxml.jackson.datatype jackson-datatype-joda - 2.8.9 + ${jackson.version} + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + + + com.fasterxml.jackson.datatype + jackson-datatype-jsr310 + + + com.fasterxml.jackson.dataformat + jackson-dataformat-yaml + ${jackson.version} + + org.apache.commons commons-collections4 @@ -233,6 +256,7 @@ net.alchim31.maven scala-maven-plugin + 3.3.1 scala-compile diff --git a/security/security.yml b/security/security.yml new file mode 100644 index 0000000..c3fa306 --- /dev/null +++ b/security/security.yml @@ -0,0 +1,4 @@ +--- +admin: + password: "$2a$10$cwkLeAFbPSNWEvjnL.w2FeoEPIv.MMEb0Pk541TiuqGRHP.x8ReoK" + role: "admin" diff --git a/src/docs/swagger/swagger.json b/src/docs/swagger/swagger.json index cb6f948..5a15f44 100644 --- a/src/docs/swagger/swagger.json +++ b/src/docs/swagger/swagger.json @@ -1 +1 @@ -{"swagger":"2.0","info":{"description":"Kafka REST API SwaggerUI","version":"0.1.0","title":"Kafka REST API SwaggerUI","contact":{"name":"gnuhpc","url":"https://github.com/gnuhpc","email":"gnuhpc@gmail.com"}},"host":"localhost:8080","basePath":"/","tags":[{"name":"zookeeper-controller","description":"Zookeeper Controller"},{"name":"kafka-controller","description":"Kafka Controller"}],"paths":{"/kafka/brokers":{"get":{"tags":["kafka-controller"],"summary":"List brokers in this cluster","operationId":"listBrokersUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/BrokerInfo"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumer/{topic}/{partition}/{offset}":{"get":{"tags":["kafka-controller"],"summary":"Get the message from the offset of the partition in the topic, decoder is not supported 
yet","operationId":"getMessageUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"partition","in":"path","description":"partition","required":true,"type":"integer","format":"int32"},{"name":"offset","in":"path","description":"offset","required":true,"type":"integer","format":"int64"},{"name":"decoder","in":"query","description":"decoder","required":false,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"string"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroup/{consumergroup}":{"delete":{"tags":["kafka-controller"],"summary":"Delete old Consumer Group","operationId":"deleteOldConsumerGroupUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumergroup","in":"path","description":"consumergroup","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/lastcommittime":{"get":{"tags":["kafka-controller"],"summary":"getLastCommitTimestamp","operationId":"getLastCommitTimestampUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumergroup","in":"path","description":"consumergroup","required":true,"type":"string"},{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object","additionalProperties":{"type":"integer","format":"int64"}}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/{partition}/{offset}":{"put":{"tags":["kafka-controller"],"summary":"Reset consumer group offset, earliest/latest can be used","operationId":"resetOffsetUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"partition","in":"path","description":"partition","required":true,"type":"integer","format":"int32"},{"name":"consumergroup","in":"path","description":"consumergroup","required":true,"type":"string"},{"name":"offset","in":"path","description":"offset","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups":{"get":{"tags":["kafka-controller"],"summary":"List all consumer groups from zk and 
kafka","operationId":"listAllConsumerGroupsUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"type","in":"query","description":"type","required":false,"type":"string","enum":["NEW","OLD"]},{"name":"topic","in":"query","description":"topic","required":false,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"array","items":{"type":"string"}}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups/{consumerGroup}/{type}":{"get":{"tags":["kafka-controller"],"summary":"Describe consumer groups, showing lag and offset, may be slow if multi topic are listened","operationId":"describeCGUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumerGroup","in":"path","description":"consumerGroup","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"array","items":{"$ref":"#/definitions/ConsumerGroupDesc"}}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups/{consumerGroup}/{type}/topic":{"get":{"tags":["kafka-controller"],"summary":"Get the topics involved of the specify consumer group","operationId":"listTopicByCGUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumerGroup","in":"path","description":"consumerGroup","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups/{consumerGroup}/{type}/topic/{topic}":{"get":{"tags":["kafka-controller"],"summary":"Describe consumer groups by topic, showing lag and offset","operationId":"describeCGByTopicUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumerGroup","in":"path","description":"consumerGroup","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]},{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/ConsumerGroupDesc"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/add":{"post":{"tags":["kafka-controller"],"summary":"Add a partition to the topic","operationId":"addPartitionUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"addPartition","description":"addPartition","required":true,"schema":{"$ref":"#/definitions/AddPartition"}}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/TopicMeta"}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/reassign/check":{"put":{"tags":["kafka-controller"],"summary":"Check the partition reassignment 
process","operationId":"checkReassignPartitionsUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"reassignStr","description":"reassignStr","required":true,"schema":{"type":"string"}}],"responses":{"-1":{"description":"Reassignment Failed"},"0":{"description":"Reassignment In Progress"},"1":{"description":"Reassignment Completed"},"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"integer","format":"int32"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/reassign/execute":{"put":{"tags":["kafka-controller"],"summary":"Execute the partition reassignment","operationId":"executeReassignPartitionsUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"reassignStr","description":"reassignStr","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"integer","format":"int32"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/reassign/generate":{"post":{"tags":["kafka-controller"],"summary":"Generate plan for the partition reassignment","operationId":"generateReassignPartitionsUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"reassignWrapper","description":"reassignWrapper","required":true,"schema":{"$ref":"#/definitions/ReassignWrapper"}}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics":{"get":{"tags":["kafka-controller"],"summary":"List topics","operationId":"listTopicsUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/create":{"post":{"tags":["kafka-controller"],"summary":"Create a topic","operationId":"createTopicUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"topic","description":"topic","required":true,"schema":{"$ref":"#/definitions/TopicDetail"}},{"name":"reassignStr","in":"query","description":"reassignStr","required":false,"type":"string"}],"responses":{"201":{"description":"Created","schema":{"$ref":"#/definitions/TopicMeta"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/{topic}":{"get":{"tags":["kafka-controller"],"summary":"Describe a topic by fetching the metadata and config","operationId":"describeTopicUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/TopicMeta"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"delete":{"tags":["kafka-controller"],"summary":"Delete a topic (you should enable topic 
deletion","operationId":"deleteTopicUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/topics/{topic}/conf":{"get":{"tags":["kafka-controller"],"summary":"Get topic configs","operationId":"getTopicConfigUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"post":{"tags":["kafka-controller"],"summary":"Create topic configs","operationId":"createTopicConfigUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"prop","description":"prop","required":true,"schema":{"type":"object","additionalProperties":{"type":"object"}}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"put":{"tags":["kafka-controller"],"summary":"Update topic configs","operationId":"updateTopicConfigUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"prop","description":"prop","required":true,"schema":{"type":"object","additionalProperties":{"type":"object"}}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"delete":{"tags":["kafka-controller"],"summary":"Delete topic configs","operationId":"deleteTopicConfigUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"delProps","description":"delProps","required":true,"schema":{"type":"array","items":{"type":"string"}}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/topics/{topic}/conf/{key}":{"get":{"tags":["kafka-controller"],"summary":"Get topic config by key","operationId":"getTopicConfigByKeyUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"delete":{"tags":["kafka-controller"],"summary":"Delete a topic config by 
key","operationId":"deleteTopicConfigByKeyUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"boolean"}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/topics/{topic}/conf/{key}={value}":{"post":{"tags":["kafka-controller"],"summary":"Create a topic config by key","operationId":"createTopicConfigByKeyUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"},{"name":"value","in":"path","description":"value","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"put":{"tags":["kafka-controller"],"summary":"Update a topic config by key","operationId":"updateTopicConfigByKeyUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"},{"name":"value","in":"path","description":"value","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/{topic}/exist":{"get":{"tags":["kafka-controller"],"summary":"Tell if a topic exists","operationId":"existTopicUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"boolean"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/{topic}/write":{"post":{"tags":["kafka-controller"],"summary":"Write a message to the topic, for testing purpose","operationId":"writeMessageUsingPOST","consumes":["text/plain"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"message","description":"message","required":true,"schema":{"type":"string"}}],"responses":{"201":{"description":"Created","schema":{"$ref":"#/definitions/GeneralResponse"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topicsbrief":{"get":{"tags":["kafka-controller"],"summary":"List topics Brief","operationId":"listTopicBriefUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/TopicBrief"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/connstate":{"get":{"tags":["zookeeper-controller"],"summary":"Get the connection state of 
zookeeper","operationId":"zkConnStateUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"string"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/env":{"get":{"tags":["zookeeper-controller"],"summary":"Get the environment information of zookeeper","operationId":"getEnvUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"$ref":"#/definitions/ZkServerEnvironment"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/ls/{path}":{"get":{"tags":["zookeeper-controller"],"summary":"List a zookeeper path","operationId":"lsUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"path","in":"path","description":"path","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/stat":{"get":{"tags":["zookeeper-controller"],"summary":"Get the service state of zookeeper","operationId":"getStatUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"$ref":"#/definitions/ZkServerStat"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}}},"definitions":{"AddPartition":{"type":"object","properties":{"numPartitionsAdded":{"type":"integer","format":"int32"},"replicaAssignment":{"type":"string"},"topic":{"type":"string"}}},"BrokerInfo":{"type":"object","properties":{"endPoints":{"type":"array","items":{"type":"string"}},"host":{"type":"string"},"id":{"type":"integer","format":"int32"},"jmxPort":{"type":"integer","format":"int32"},"port":{"type":"integer","format":"int32"},"rack":{"type":"string"},"securityProtocol":{"type":"object"},"startTime":{"type":"string","format":"date-time"},"version":{"type":"integer","format":"int32"}}},"ConsumerGroupDesc":{"type":"object","properties":{"consumerId":{"type":"string"},"currentOffset":{"type":"integer","format":"int64"},"groupName":{"type":"string"},"host":{"type":"string"},"lag":{"type":"integer","format":"int64"},"logEndOffset":{"type":"integer","format":"int64"},"partitionId":{"type":"integer","format":"int32"},"state":{"type":"string","enum":["RUNNING","PENDING"]},"topic":{"type":"string"},"type":{"type":"string","enum":["NEW","OLD"]}}},"GeneralResponse":{"type":"object","properties":{"msg":{"type":"string"},"state":{"type":"string","enum":["success","failure"]}}},"HostAndPort":{"type":"object","properties":{"hostText":{"type":"string"},"port":{"type":"integer","format":"int32"}}},"Map«int,long»":{"type":"object","additionalProperties":{"type":"integer","format":"int64"}},"ReassignWrapper":{"type":"object","properties":{"brokers":{"type":"array","items":{"type":"integer","format":"int32"}},"topics":{"type":"array","items":{"type":"string"}}}},"TopicAndPartition":{"type":"object"},"TopicBrief":{"type":"object","properties":{"isrRate":{"type":"number","format":"double"},"numPartition":{"type":"integer","format":"int32"},"topic":{"type":"string"}}},"TopicDetail":{"type":"object","properties":{"factor":{"type":"integer","format":"int32"},"name":{"type":"string"},"partitions":{"type":"integer","format":"int32"},"prop":{"type":"objec
t","additionalProperties":{"type":"object"}}}},"TopicMeta":{"type":"object","properties":{"partitionCount":{"type":"integer","format":"int32"},"replicationFactor":{"type":"integer","format":"int32"},"topicCustomConfigs":{"type":"object","additionalProperties":{"type":"object"}},"topicName":{"type":"string"},"topicPartitionInfos":{"type":"array","items":{"$ref":"#/definitions/TopicPartitionInfo"}}}},"TopicPartitionInfo":{"type":"object","properties":{"endOffset":{"type":"integer","format":"int64"},"in_sync":{"type":"boolean"},"isr":{"type":"array","items":{"type":"string"}},"leader":{"type":"string"},"messageAvailable":{"type":"integer","format":"int64"},"partitionId":{"type":"integer","format":"int32"},"replicas":{"type":"array","items":{"type":"string"}},"startOffset":{"type":"integer","format":"int64"}}},"ZkServerClient":{"type":"object","properties":{"host":{"type":"string"},"ops":{"type":"integer","format":"int32"},"port":{"type":"integer","format":"int32"},"queued":{"type":"integer","format":"int32"},"received":{"type":"integer","format":"int32"},"sent":{"type":"integer","format":"int32"}}},"ZkServerEnvironment":{"type":"object","properties":{"attributes":{"type":"object","additionalProperties":{"type":"string"}}}},"ZkServerStat":{"type":"object","properties":{"avgLatency":{"type":"integer","format":"int32"},"buildDate":{"type":"string"},"clients":{"type":"array","items":{"$ref":"#/definitions/ZkServerClient"}},"connections":{"type":"integer","format":"int32"},"maxLatency":{"type":"integer","format":"int32"},"minLatency":{"type":"integer","format":"int32"},"mode":{"type":"string","enum":["Leader","Follower","Observer"]},"nodes":{"type":"integer","format":"int32"},"outstanding":{"type":"integer","format":"int32"},"received":{"type":"integer","format":"int32"},"sent":{"type":"integer","format":"int32"},"version":{"type":"string"},"zxId":{"type":"string"}}}}} \ No newline at end of file +{"swagger":"2.0","info":{"description":"Kafka REST API SwaggerUI","version":"0.1.0","title":"Kafka REST API SwaggerUI","contact":{"name":"gnuhpc","url":"https://github.com/gnuhpc","email":"gnuhpc@gmail.com"}},"host":"localhost:8080","basePath":"/","tags":[{"name":"collector-controller","description":"Rest API for Collecting JMX Metric Data"},{"name":"user-controller","description":"Security User Management Controller."},{"name":"zookeeper-controller","description":"Zookeeper Controller"},{"name":"kafka-controller","description":"Kafka Controller"}],"paths":{"/jmx/v1":{"get":{"tags":["collector-controller"],"summary":"Fetch all JMX metric data","operationId":"collectJMXMetricUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"jmxurl","in":"query","description":"Parameter jmxurl should be a comma-separated list of {IP:Port} or set to 'default'","required":false,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/JMXMetricDataV1"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/jmx/v2":{"post":{"tags":["collector-controller"],"summary":"Fetch JMX metric data with query filter. 
You can get the query filter template through the API /jmx/v2/filters.","operationId":"collectJMXMetricUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"jmxurl","in":"query","description":"Parameter jmxurl should be a comma-separated list of {IP:Port} or set to 'default'","required":false,"type":"string"},{"in":"body","name":"jmxQuery","description":"jmxQuery","required":true,"schema":{"$ref":"#/definitions/JMXQuery"}}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/JMXMetricData"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/jmx/v2/filters":{"get":{"tags":["collector-controller"],"summary":"List the query filter templates with the filterKey. If filterKey is set to empty, it will return all the templates.","operationId":"listJMXFilterTemplateUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"filterKey","in":"query","description":"filterKey","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/brokers":{"get":{"tags":["kafka-controller"],"summary":"List brokers in this cluster","operationId":"listBrokersUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/BrokerInfo"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumer/{topic}/{partition}/{offset}":{"get":{"tags":["kafka-controller"],"summary":"Get the message from the offset of the partition in the topic, decoder is not supported yet","operationId":"getMessageUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"partition","in":"path","description":"partition","required":true,"type":"integer","format":"int32"},{"name":"offset","in":"path","description":"offset","required":true,"type":"integer","format":"int64"},{"name":"decoder","in":"query","description":"decoder","required":false,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"string"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroup/{consumergroup}":{"delete":{"tags":["kafka-controller"],"summary":"Delete old Consumer Group","operationId":"deleteOldConsumerGroupUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumergroup","in":"path","description":"consumergroup","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"204":{"description":"No 
Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/lastcommittime":{"get":{"tags":["kafka-controller"],"summary":"getLastCommitTimestamp","operationId":"getLastCommitTimestampUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumergroup","in":"path","description":"consumergroup","required":true,"type":"string"},{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object","additionalProperties":{"type":"integer","format":"int64"}}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroup/{consumergroup}/{type}/topic/{topic}/{partition}/{offset}":{"put":{"tags":["kafka-controller"],"summary":"Reset consumer group offset, earliest/latest can be used","operationId":"resetOffsetUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"partition","in":"path","description":"partition","required":true,"type":"integer","format":"int32"},{"name":"consumergroup","in":"path","description":"consumergroup","required":true,"type":"string"},{"name":"offset","in":"path","description":"offset","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups":{"get":{"tags":["kafka-controller"],"summary":"List all consumer groups from zk and kafka","operationId":"listAllConsumerGroupsUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"type","in":"query","description":"type","required":false,"type":"string","enum":["NEW","OLD"]},{"name":"topic","in":"query","description":"topic","required":false,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"array","items":{"type":"string"}}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups/{consumerGroup}/{type}":{"get":{"tags":["kafka-controller"],"summary":"Describe consumer groups, showing lag and offset, may be slow if multi topic are listened","operationId":"describeCGUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumerGroup","in":"path","description":"consumerGroup","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"array","items":{"$ref":"#/definitions/ConsumerGroupDesc"}}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups/{consumerGroup}/{type}/topic":{"get":{"tags":["kafka-controller"],"summary":"Get the topics involved of the specify consumer 
group","operationId":"listTopicByCGUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumerGroup","in":"path","description":"consumerGroup","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/consumergroups/{consumerGroup}/{type}/topic/{topic}":{"get":{"tags":["kafka-controller"],"summary":"Describe consumer groups by topic, showing lag and offset","operationId":"describeCGByTopicUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"consumerGroup","in":"path","description":"consumerGroup","required":true,"type":"string"},{"name":"type","in":"path","description":"type","required":true,"type":"string","enum":["NEW","OLD"]},{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/ConsumerGroupDesc"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/health":{"get":{"tags":["kafka-controller"],"summary":"Check the cluster health.","operationId":"healthCheckUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/HealthCheckResult"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/add":{"post":{"tags":["kafka-controller"],"summary":"Add a partition to the topic","operationId":"addPartitionUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"addPartition","description":"addPartition","required":true,"schema":{"$ref":"#/definitions/AddPartition"}}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/TopicMeta"}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/reassign/check":{"put":{"tags":["kafka-controller"],"summary":"Check the partition reassignment process","operationId":"checkReassignPartitionsUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"reassignStr","description":"reassignStr","required":true,"schema":{"type":"string"}}],"responses":{"-1":{"description":"Reassignment Failed"},"0":{"description":"Reassignment In Progress"},"1":{"description":"Reassignment Completed"},"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"integer","format":"int32"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/reassign/execute":{"put":{"tags":["kafka-controller"],"summary":"Execute the partition 
reassignment","operationId":"executeReassignPartitionsUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"reassignStr","description":"reassignStr","required":true,"schema":{"type":"string"}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"integer","format":"int32"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/partitions/reassign/generate":{"post":{"tags":["kafka-controller"],"summary":"Generate plan for the partition reassignment","operationId":"generateReassignPartitionsUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"reassignWrapper","description":"reassignWrapper","required":true,"schema":{"$ref":"#/definitions/ReassignWrapper"}}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics":{"get":{"tags":["kafka-controller"],"summary":"List topics","operationId":"listTopicsUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/create":{"post":{"tags":["kafka-controller"],"summary":"Create a topic","operationId":"createTopicUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"topic","description":"topic","required":true,"schema":{"$ref":"#/definitions/TopicDetail"}},{"name":"reassignStr","in":"query","description":"reassignStr","required":false,"type":"string"}],"responses":{"201":{"description":"Created","schema":{"$ref":"#/definitions/TopicMeta"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/{topic}":{"get":{"tags":["kafka-controller"],"summary":"Describe a topic by fetching the metadata and config","operationId":"describeTopicUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/TopicMeta"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"delete":{"tags":["kafka-controller"],"summary":"Delete a topic (you should enable topic deletion","operationId":"deleteTopicUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/topics/{topic}/conf":{"get":{"tags":["kafka-controller"],"summary":"Get topic 
configs","operationId":"getTopicConfigUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"post":{"tags":["kafka-controller"],"summary":"Create topic configs","operationId":"createTopicConfigUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"prop","description":"prop","required":true,"schema":{"type":"object","additionalProperties":{"type":"object"}}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"put":{"tags":["kafka-controller"],"summary":"Update topic configs","operationId":"updateTopicConfigUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"prop","description":"prop","required":true,"schema":{"type":"object","additionalProperties":{"type":"object"}}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"delete":{"tags":["kafka-controller"],"summary":"Delete topic configs","operationId":"deleteTopicConfigUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"delProps","description":"delProps","required":true,"schema":{"type":"array","items":{"type":"string"}}}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/topics/{topic}/conf/{key}":{"get":{"tags":["kafka-controller"],"summary":"Get topic config by key","operationId":"getTopicConfigByKeyUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"delete":{"tags":["kafka-controller"],"summary":"Delete a topic config by key","operationId":"deleteTopicConfigByKeyUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"boolean"}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/kafka/topics/{topic}/conf/{key}={value}":{"post":{"tags":["kafka-controller"],"summary":"Create a topic config by 
key","operationId":"createTopicConfigByKeyUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"},{"name":"value","in":"path","description":"value","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"put":{"tags":["kafka-controller"],"summary":"Update a topic config by key","operationId":"updateTopicConfigByKeyUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"name":"key","in":"path","description":"key","required":true,"type":"string"},{"name":"value","in":"path","description":"value","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"object"}}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/{topic}/exist":{"get":{"tags":["kafka-controller"],"summary":"Tell if a topic exists","operationId":"existTopicUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"boolean"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topics/{topic}/write":{"post":{"tags":["kafka-controller"],"summary":"Write a message to the topic, for testing purpose","operationId":"writeMessageUsingPOST","consumes":["text/plain"],"produces":["*/*"],"parameters":[{"name":"topic","in":"path","description":"topic","required":true,"type":"string"},{"in":"body","name":"message","description":"message","required":true,"schema":{"type":"string"}}],"responses":{"201":{"description":"Created","schema":{"$ref":"#/definitions/GeneralResponse"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/kafka/topicsbrief":{"get":{"tags":["kafka-controller"],"summary":"List topics Brief","operationId":"listTopicBriefUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"$ref":"#/definitions/TopicBrief"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/users":{"get":{"tags":["user-controller"],"summary":"Get user list.","operationId":"listUserUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"post":{"tags":["user-controller"],"summary":"Add 
user.","operationId":"addUserUsingPOST","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"user","description":"user","required":true,"schema":{"$ref":"#/definitions/User"}}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}},"put":{"tags":["user-controller"],"summary":"Modify user information.","operationId":"modifyUserUsingPUT","consumes":["application/json"],"produces":["*/*"],"parameters":[{"in":"body","name":"user","description":"user","required":true,"schema":{"$ref":"#/definitions/User"}}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"201":{"description":"Created"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/users/{username}":{"delete":{"tags":["user-controller"],"summary":"Delete user.","operationId":"delUserUsingDELETE","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"username","in":"path","description":"username","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"$ref":"#/definitions/GeneralResponse"}},"204":{"description":"No Content"},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"}}}},"/zk/connstate":{"get":{"tags":["zookeeper-controller"],"summary":"Get the connection state of zookeeper","operationId":"zkConnStateUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"string"}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/env":{"get":{"tags":["zookeeper-controller"],"summary":"Get the environment information of zookeeper","operationId":"getEnvUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"$ref":"#/definitions/ZkServerEnvironment"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/get/path":{"get":{"tags":["zookeeper-controller"],"summary":"Get data of a zookeeper path","operationId":"getUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"path","in":"query","description":"path","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/ls/path":{"get":{"tags":["zookeeper-controller"],"summary":"List a zookeeper path","operationId":"lsUsingGET","consumes":["application/json"],"produces":["*/*"],"parameters":[{"name":"path","in":"query","description":"path","required":true,"type":"string"}],"responses":{"200":{"description":"OK","schema":{"type":"array","items":{"type":"string"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}},"/zk/stat":{"get":{"tags":["zookeeper-controller"],"summary":"Get the service state of 
zookeeper","operationId":"getStatUsingGET","consumes":["application/json"],"produces":["*/*"],"responses":{"200":{"description":"OK","schema":{"type":"object","additionalProperties":{"$ref":"#/definitions/ZkServerStat"}}},"401":{"description":"Unauthorized"},"403":{"description":"Forbidden"},"404":{"description":"Not Found"}}}}},"definitions":{"AddPartition":{"type":"object","properties":{"numPartitionsAdded":{"type":"integer","format":"int32"},"replicaAssignment":{"type":"string"},"topic":{"type":"string"}}},"BrokerInfo":{"type":"object","properties":{"endPoints":{"type":"array","items":{"type":"string"}},"host":{"type":"string"},"id":{"type":"integer","format":"int32"},"jmxPort":{"type":"integer","format":"int32"},"port":{"type":"integer","format":"int32"},"rack":{"type":"string"},"securityProtocol":{"type":"object"},"startTime":{"type":"string","format":"date-time"},"version":{"type":"integer","format":"int32"}}},"ConsumerGroupDesc":{"type":"object","properties":{"consumerId":{"type":"string"},"currentOffset":{"type":"integer","format":"int64"},"groupName":{"type":"string"},"host":{"type":"string"},"lag":{"type":"integer","format":"int64"},"logEndOffset":{"type":"integer","format":"int64"},"partitionId":{"type":"integer","format":"int32"},"state":{"type":"string","enum":["RUNNING","PENDING"]},"topic":{"type":"string"},"type":{"type":"string","enum":["NEW","OLD"]}}},"GeneralResponse":{"type":"object","properties":{"msg":{"type":"string"},"state":{"type":"string","enum":["success","failure"]}}},"HashMap«string,object»":{"type":"object","additionalProperties":{"type":"object"}},"HealthCheckResult":{"type":"object","properties":{"msg":{"type":"string"},"status":{"type":"string"},"timestamp":{"type":"string","example":"yyyy-MM-dd HH:mm:ss"}}},"HostAndPort":{"type":"object","properties":{"hostText":{"type":"string"},"port":{"type":"integer","format":"int32"}}},"JMXConfiguration":{"type":"object","properties":{"include":{"$ref":"#/definitions/JMXFilter"},"exclude":{"$ref":"#/definitions/JMXFilter"}}},"JMXFilter":{"type":"object","properties":{"attribute":{"type":"object"},"beanNames":{"type":"array","items":{"type":"string"}},"beanRegexes":{"type":"array","items":{"$ref":"#/definitions/Pattern"}},"domain":{"type":"string"},"domainRegex":{"$ref":"#/definitions/Pattern"},"emptyBeanName":{"type":"boolean"},"filter":{"type":"object","additionalProperties":{"type":"object"}}}},"JMXMetricData":{"type":"object","properties":{"collected":{"type":"boolean"},"host":{"type":"string"},"metrics":{"type":"array","items":{"$ref":"#/definitions/HashMap«string,object»"}},"msg":{"type":"string"},"timestamp":{"type":"string","example":"yyyy-MM-dd HH:mm:ss"}}},"JMXMetricDataV1":{"type":"object","properties":{"collected":{"type":"boolean"},"host":{"type":"string"},"mbeanInfo":{"type":"object"},"msg":{"type":"string"},"timestamp":{"type":"string","example":"yyyy-MM-dd 
HH:mm:ss"}}},"JMXQuery":{"type":"object","properties":{"filters":{"type":"array","items":{"$ref":"#/definitions/JMXConfiguration"}}}},"Map«int,long»":{"type":"object","additionalProperties":{"type":"integer","format":"int64"}},"Pattern":{"type":"object","properties":{"cursor":{"type":"integer","format":"int32"}}},"ReassignWrapper":{"type":"object","properties":{"brokers":{"type":"array","items":{"type":"integer","format":"int32"}},"topics":{"type":"array","items":{"type":"string"}}}},"TopicAndPartition":{"type":"object"},"TopicBrief":{"type":"object","properties":{"isrRate":{"type":"number","format":"double"},"numPartition":{"type":"integer","format":"int32"},"topic":{"type":"string"}}},"TopicDetail":{"type":"object","properties":{"factor":{"type":"integer","format":"int32"},"name":{"type":"string"},"partitions":{"type":"integer","format":"int32"},"prop":{"type":"object","additionalProperties":{"type":"object"}}}},"TopicMeta":{"type":"object","properties":{"partitionCount":{"type":"integer","format":"int32"},"replicationFactor":{"type":"integer","format":"int32"},"topicCustomConfigs":{"type":"object","additionalProperties":{"type":"object"}},"topicName":{"type":"string"},"topicPartitionInfos":{"type":"array","items":{"$ref":"#/definitions/TopicPartitionInfo"}}}},"TopicPartitionInfo":{"type":"object","properties":{"endOffset":{"type":"integer","format":"int64"},"in_sync":{"type":"boolean"},"isr":{"type":"array","items":{"type":"string"}},"leader":{"type":"string"},"messageAvailable":{"type":"integer","format":"int64"},"partitionId":{"type":"integer","format":"int32"},"replicas":{"type":"array","items":{"type":"string"}},"startOffset":{"type":"integer","format":"int64"}}},"User":{"type":"object","properties":{"username":{"type":"string"},"password":{"type":"string"},"role":{"type":"string"}}},"ZkServerClient":{"type":"object","properties":{"host":{"type":"string"},"ops":{"type":"integer","format":"int32"},"port":{"type":"integer","format":"int32"},"queued":{"type":"integer","format":"int32"},"received":{"type":"integer","format":"int32"},"sent":{"type":"integer","format":"int32"}}},"ZkServerEnvironment":{"type":"object","properties":{"attributes":{"type":"object","additionalProperties":{"type":"string"}}}},"ZkServerStat":{"type":"object","properties":{"avgLatency":{"type":"integer","format":"int32"},"buildDate":{"type":"string"},"clients":{"type":"array","items":{"$ref":"#/definitions/ZkServerClient"}},"connections":{"type":"integer","format":"int32"},"maxLatency":{"type":"integer","format":"int32"},"minLatency":{"type":"integer","format":"int32"},"mode":{"type":"string","enum":["Leader","Follower","Observer","Standalone"]},"nodes":{"type":"integer","format":"int32"},"outstanding":{"type":"integer","format":"int32"},"received":{"type":"integer","format":"int32"},"sent":{"type":"integer","format":"int32"},"version":{"type":"string"},"zxId":{"type":"string"}}}}} \ No newline at end of file diff --git a/src/main/java/org/gnuhpc/bigdata/config/JMXConfig.java b/src/main/java/org/gnuhpc/bigdata/config/JMXConfig.java new file mode 100644 index 0000000..27c0258 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/config/JMXConfig.java @@ -0,0 +1,11 @@ +package org.gnuhpc.bigdata.config; + +import org.gnuhpc.bigdata.utils.CommonUtils; + +import java.io.File; + +public class JMXConfig { + public static final String JMX_CONNECT_TIMEOUT = "attribute.remote.x.request.waiting.timeout"; + public static final String JMX_PROTOCOL = "service:jmx:rmi:///jndi/rmi://"; + public static final String JMX_FILTER_DIR 
= CommonUtils.PROJECT_ROOT_FOLDER + File.separator + "JMXFilterTemplate"; +} diff --git a/src/main/java/org/gnuhpc/bigdata/config/KafkaConfig.java b/src/main/java/org/gnuhpc/bigdata/config/KafkaConfig.java index 5d42ca2..35d4388 100644 --- a/src/main/java/org/gnuhpc/bigdata/config/KafkaConfig.java +++ b/src/main/java/org/gnuhpc/bigdata/config/KafkaConfig.java @@ -1,6 +1,7 @@ package org.gnuhpc.bigdata.config; import lombok.Data; +import lombok.Getter; import lombok.extern.log4j.Log4j; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; @@ -32,6 +33,7 @@ @Data @EnableKafka @Configuration +@Getter public class KafkaConfig { @Value("${kafka.brokers}") private String brokers; @@ -42,10 +44,12 @@ public class KafkaConfig { @Value("${kafka.offset.partitions}") private int internalTopicPartitions; - @Value("${spring.kafka.consumer.group-id}") private String groupId; + @Value("${kafka.healthcheck.topic}") + private String healthCheckTopic; + @Bean(initMethod = "init", destroyMethod = "destroy") public KafkaUtils kafkaUtils() { return new KafkaUtils(); diff --git a/src/main/java/org/gnuhpc/bigdata/config/WebSecurityConfig.java b/src/main/java/org/gnuhpc/bigdata/config/WebSecurityConfig.java new file mode 100644 index 0000000..5b95330 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/config/WebSecurityConfig.java @@ -0,0 +1,70 @@ +package org.gnuhpc.bigdata.config; + +import org.gnuhpc.bigdata.security.BasicAuthenticationPoint; +import org.gnuhpc.bigdata.security.UserDetailsServiceImp; +import org.gnuhpc.bigdata.utils.CommonUtils; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.http.HttpMethod; +import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder; +import org.springframework.security.config.annotation.web.builders.HttpSecurity; +import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity; +import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter; +import org.springframework.security.config.http.SessionCreationPolicy; +import org.springframework.security.core.userdetails.UserDetailsService; +import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; + +import java.io.File; + +@Configuration +@EnableWebSecurity +public class WebSecurityConfig extends WebSecurityConfigurerAdapter { + public static final String SECURITY_FILE_PATH = CommonUtils.PROJECT_ROOT_FOLDER + File.separator + + "security" + File.separator + "security.yml"; + + @Autowired + private BasicAuthenticationPoint basicAuthenticationPoint; + + @Value("${server.security.check}") + private boolean securityCheck; + @Value("${server.security.checkInitDelay}") + private int checkInitDelay; + @Value("${server.security.checkSecurityInterval}") + private int checkSecurityInterval; + + @Bean + public UserDetailsService userDetailsService() { + return new UserDetailsServiceImp(securityCheck, checkInitDelay, checkSecurityInterval); + }; + + @Bean + public BCryptPasswordEncoder passwordEncoder() { + return new BCryptPasswordEncoder(); + }; + + @Override + protected void configure(HttpSecurity http) throws Exception { + http.csrf().disable(); + if (securityCheck) { + http.authorizeRequests().antMatchers("/api", "/swagger-ui.html", 
"/webjars/**", "/swagger-resources/**", "/v2/**").permitAll() + .antMatchers(HttpMethod.GET, "/**").permitAll() + .anyRequest().authenticated(); + http.httpBasic().authenticationEntryPoint(basicAuthenticationPoint); + http.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS); + } else { + http.authorizeRequests().antMatchers("/**").permitAll() + .anyRequest().authenticated(); + } + } + + @Autowired + public void configureGlobal(AuthenticationManagerBuilder auth) throws Exception { + auth.userDetailsService(userDetailsService()).passwordEncoder(passwordEncoder()); + } + + public static void main(String[] args) { + //System.out.println(new BCryptPasswordEncoder().encode("admin1234")); + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/constant/ZkServerMode.java b/src/main/java/org/gnuhpc/bigdata/constant/ZkServerMode.java index 6fc7bc4..033c0a0 100644 --- a/src/main/java/org/gnuhpc/bigdata/constant/ZkServerMode.java +++ b/src/main/java/org/gnuhpc/bigdata/constant/ZkServerMode.java @@ -3,5 +3,6 @@ public enum ZkServerMode { Leader, Follower, - Observer + Observer, + Standalone } diff --git a/src/main/java/org/gnuhpc/bigdata/controller/CollectorController.java b/src/main/java/org/gnuhpc/bigdata/controller/CollectorController.java new file mode 100644 index 0000000..c0ca1a9 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/controller/CollectorController.java @@ -0,0 +1,63 @@ +package org.gnuhpc.bigdata.controller; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import io.swagger.annotations.ApiParam; +import lombok.extern.log4j.Log4j; +import org.gnuhpc.bigdata.model.JMXMetricData; +import org.gnuhpc.bigdata.model.JMXMetricDataV1; +import org.gnuhpc.bigdata.model.JMXQuery; +import org.gnuhpc.bigdata.service.CollectorService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.validation.annotation.Validated; +import org.springframework.web.bind.annotation.*; + +import javax.validation.constraints.Pattern; +import java.util.HashMap; +import java.util.List; + +@Log4j +@RestController +@Validated +@Api(value = "/jmx", description = "Rest API for Collecting JMX Metric Data") +public class CollectorController { + private static final String IP_AND_PORT_LIST_REGEX = "(([0-9]+(?:\\.[0-9]+){3}:[0-9]+,)*([0-9]+(?:\\.[0-9]+){3}:[0-9]+)+)|(default)"; + @Autowired + private CollectorService collectorService; + @Value("${jmx.kafka.jmxurl}") + private String jmxKafkaURL; + + @GetMapping("/jmx/v1") + @ApiOperation(value = "Fetch all JMX metric data") + public List collectJMXMetric( + @Pattern(regexp = IP_AND_PORT_LIST_REGEX)@RequestParam @ApiParam( + value = "Parameter jmxurl should be a comma-separated list of {IP:Port} or set to \'default\'")String jmxurl) { + if(jmxurl.equals("default")) { + jmxurl = jmxKafkaURL; + } + + log.debug("Collect JMX Metric Data Started."); + return collectorService.collectJMXData(jmxurl); + } + + @PostMapping("/jmx/v2") + @ApiOperation(value = "Fetch JMX metric data with query filter. 
You can get the query filter template through the API /jmx/v2/filters.") + public List<JMXMetricData> collectJMXMetric(@Pattern(regexp = IP_AND_PORT_LIST_REGEX)@RequestParam + @ApiParam(value = "Parameter jmxurl should be a comma-separated list of {IP:Port} or set to \'default\'")String jmxurl, + @RequestBody JMXQuery jmxQuery) { + if(jmxurl.equals("default")) { + jmxurl = jmxKafkaURL; + } + + log.debug("Collect JMX Metric Data Started."); + + return collectorService.collectJMXData(jmxurl, jmxQuery); + } + + @GetMapping("/jmx/v2/filters") + @ApiOperation(value = "List the query filter templates with the filterKey. If filterKey is set to empty, it will return all the templates.") + public HashMap<String, Object> listJMXFilterTemplate(@RequestParam String filterKey) { + return collectorService.listJMXFilterTemplate(filterKey); + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/controller/CustomErrorController.java b/src/main/java/org/gnuhpc/bigdata/controller/CustomErrorController.java index 3a17323..ac6fc0e 100644 --- a/src/main/java/org/gnuhpc/bigdata/controller/CustomErrorController.java +++ b/src/main/java/org/gnuhpc/bigdata/controller/CustomErrorController.java @@ -1,10 +1,12 @@ package org.gnuhpc.bigdata.controller; import org.gnuhpc.bigdata.exception.ErrorJson; +import org.gnuhpc.bigdata.exception.RestErrorResponse; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.web.ErrorAttributes; import org.springframework.boot.autoconfigure.web.ErrorController; +import org.springframework.http.HttpStatus; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; import org.springframework.web.context.request.RequestAttributes; @@ -27,10 +29,14 @@ public class CustomErrorController implements ErrorController { private ErrorAttributes errorAttributes; @RequestMapping(value = PATH) - public ErrorJson error(HttpServletRequest request, HttpServletResponse response) { + public RestErrorResponse error(HttpServletRequest request, HttpServletResponse response) { // Appropriate HTTP response code (e.g. 404 or 500) is automatically set by Spring. // Here we just define response body. 
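[Reviewer note, not part of the patch: a minimal client sketch of how the collector endpoints added above might be exercised. The localhost:8080 host comes from the swagger spec earlier in this diff; the class name CollectorClientSketch is hypothetical.]

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class CollectorClientSketch {
    public static void main(String[] args) throws Exception {
        // GET /jmx/v1?jmxurl=default resolves to the configured jmx.kafka.jmxurl on the server.
        String jmxurl = URLEncoder.encode("default", "UTF-8");
        URL url = new URL("http://localhost:8080/jmx/v1?jmxurl=" + jmxurl);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        try (BufferedReader in = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // JSON array of JMXMetricDataV1 objects
            }
        }
    }
}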
- return new ErrorJson(response.getStatus(), getErrorAttributes(request, debug)); + //return new ErrorJson(response.getStatus(), getErrorAttributes(request, debug)); + return new RestErrorResponse(HttpStatus.valueOf(response.getStatus()), response.getStatus(), + (String)getErrorAttributes(request, debug).get("message"), + (String)getErrorAttributes(request, debug).get("trace"), + ""); } @Override diff --git a/src/main/java/org/gnuhpc/bigdata/controller/KafkaController.java b/src/main/java/org/gnuhpc/bigdata/controller/KafkaController.java index 40f0374..8872643 100644 --- a/src/main/java/org/gnuhpc/bigdata/controller/KafkaController.java +++ b/src/main/java/org/gnuhpc/bigdata/controller/KafkaController.java @@ -200,7 +200,7 @@ public Map checkReassignPartitions(@RequestBody Stri public Map<String, Set<String>> listAllConsumerGroups( @RequestParam(required = false) ConsumerType type, @RequestParam(required = false) String topic - ) throws Exception { + ) { if(topic!=null){ return kafkaAdminService.listConsumerGroupsByTopic(topic,type); } else{ @@ -271,4 +271,10 @@ private void isTopicExist(String topic) throws InvalidTopicException { throw new InvalidTopicException("Topic " + topic + " non-exist!"); } } + + @GetMapping(value = "/health") + @ApiOperation(value = "Check the cluster health.") + public HealthCheckResult healthCheck() { + return kafkaAdminService.healthCheck(); + } } diff --git a/src/main/java/org/gnuhpc/bigdata/controller/UserController.java b/src/main/java/org/gnuhpc/bigdata/controller/UserController.java new file mode 100644 index 0000000..1fd2045 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/controller/UserController.java @@ -0,0 +1,56 @@ +package org.gnuhpc.bigdata.controller; + +import io.swagger.annotations.Api; +import io.swagger.annotations.ApiOperation; +import lombok.extern.log4j.Log4j; +import org.gnuhpc.bigdata.constant.GeneralResponseState; +import org.gnuhpc.bigdata.model.GeneralResponse; +import org.gnuhpc.bigdata.model.User; +import org.gnuhpc.bigdata.service.UserService; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.validation.BindingResult; +import org.springframework.web.bind.annotation.*; + +import javax.validation.Valid; +import java.util.List; + +@Log4j +@RestController +@Api(value = "/users", description = "Security User Management Controller.") +public class UserController { + @Autowired + private UserService userService; + + @GetMapping("/users") + @ApiOperation(value = "Get user list.") + public List<String> listUser() { + return userService.listUser(); + } + + @PostMapping("/users") + @ApiOperation(value = "Add user.") + public GeneralResponse addUser(@RequestBody@Valid User user, BindingResult results) { + if (results.hasErrors()) { + return new GeneralResponse(GeneralResponseState.failure, results.getFieldError().getDefaultMessage()); + } + log.info("Receive add user request: username:" + user.getUsername()); + return userService.addUser(user); + } + + @PutMapping("/users") + @ApiOperation(value = "Modify user information.") + public GeneralResponse modifyUser(@RequestBody@Valid User user, BindingResult results) { + if (results.hasErrors()) { + return new GeneralResponse(GeneralResponseState.failure, results.getFieldError().getDefaultMessage()); + } + log.info("Receive modify user request: username:" + user.getUsername()); + return userService.modifyUser(user); + } + + @DeleteMapping("/users/{username}") + @ApiOperation(value = "Delete user.") + public GeneralResponse delUser(@PathVariable String username) { 
log.info("Receive delete user request: username:" + username); + return userService.delUser(username); + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/controller/ZookeeperController.java b/src/main/java/org/gnuhpc/bigdata/controller/ZookeeperController.java index 4762376..05e51b3 100644 --- a/src/main/java/org/gnuhpc/bigdata/controller/ZookeeperController.java +++ b/src/main/java/org/gnuhpc/bigdata/controller/ZookeeperController.java @@ -7,8 +7,12 @@ import org.gnuhpc.bigdata.model.ZkServerStat; import org.gnuhpc.bigdata.service.ZookeeperService; import org.gnuhpc.bigdata.utils.ZookeeperUtils; +import org.gnuhpc.bigdata.validator.ZKNodePathExistConstraint; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.web.bind.annotation.*; +import org.springframework.web.bind.annotation.GetMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.RestController; import java.util.List; import java.util.Map; @@ -26,16 +30,25 @@ public class ZookeeperController { @Autowired private ZookeeperService zookeeperService; - @GetMapping("/ls/{path}") + @GetMapping("/ls/path") @ApiOperation(value = "List a zookeeper path") - public List ls(@PathVariable("path") String path){ + public List ls(@RequestParam String path){ + return zookeeperUtils.lsPath(path); + /* try { - return zookeeperUtils.getCuratorClient().getChildren().forPath("/"+path); + return zookeeperUtils.getCuratorClient().getChildren().forPath(path); } catch (Exception e) { e.printStackTrace(); } return null; + */ + } + + @GetMapping("/get/path") + @ApiOperation(value = "Get data of a zookeeper path") + public Map get(@RequestParam String path){ + return zookeeperUtils.getNodeData(path); } @GetMapping("/connstate") diff --git a/src/main/java/org/gnuhpc/bigdata/exception/CollectorException.java b/src/main/java/org/gnuhpc/bigdata/exception/CollectorException.java new file mode 100644 index 0000000..b68974f --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/exception/CollectorException.java @@ -0,0 +1,33 @@ +package org.gnuhpc.bigdata.exception; + +public class CollectorException extends Exception { + public CollectorException(String message, Throwable cause) { + super(message, cause); + } + + public CollectorException(String message) { + super(message); + } + + public CollectorException(Throwable cause) { + super(cause); + } + + public CollectorException() { + super(); + } + + public String catchStackTrace() { + String stackTraceString = ""; + StackTraceElement[] stackElements = this.getStackTrace(); + if (stackElements != null) { + for (int i = 0; i < stackElements.length; i++) { + stackTraceString = stackTraceString + stackElements[i].getClassName()+"\\/t"; + stackTraceString = stackTraceString + stackElements[i].getFileName()+"\\/t"; + stackTraceString = stackTraceString + stackElements[i].getLineNumber()+"\\/t"; + stackTraceString = stackTraceString + stackElements[i].getMethodName()+"\\/t"; + } + } + return stackTraceString; + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/exception/GlobalExceptionHandler.java b/src/main/java/org/gnuhpc/bigdata/exception/GlobalExceptionHandler.java new file mode 100644 index 0000000..b9f06c8 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/exception/GlobalExceptionHandler.java @@ -0,0 +1,342 @@ +package org.gnuhpc.bigdata.exception; + +import lombok.extern.log4j.Log4j; +import org.springframework.beans.ConversionNotSupportedException; 
+import org.springframework.beans.TypeMismatchException; +import org.springframework.http.*; +import org.springframework.http.converter.HttpMessageNotReadableException; +import org.springframework.http.converter.HttpMessageNotWritableException; +import org.springframework.util.CollectionUtils; +import org.springframework.validation.BindException; +import org.springframework.web.HttpMediaTypeNotAcceptableException; +import org.springframework.web.HttpMediaTypeNotSupportedException; +import org.springframework.web.HttpRequestMethodNotSupportedException; +import org.springframework.web.bind.MethodArgumentNotValidException; +import org.springframework.web.bind.MissingPathVariableException; +import org.springframework.web.bind.MissingServletRequestParameterException; +import org.springframework.web.bind.ServletRequestBindingException; +import org.springframework.web.bind.annotation.ControllerAdvice; +import org.springframework.web.bind.annotation.ExceptionHandler; +import org.springframework.web.context.request.ServletWebRequest; +import org.springframework.web.context.request.WebRequest; +import org.springframework.web.context.request.async.AsyncRequestTimeoutException; +import org.springframework.web.multipart.support.MissingServletRequestPartException; +import org.springframework.web.servlet.NoHandlerFoundException; +import org.springframework.web.servlet.mvc.method.annotation.ResponseEntityExceptionHandler; +import org.springframework.web.util.WebUtils; + +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import javax.validation.ConstraintViolationException; +import java.util.List; +import java.util.Set; + +@Log4j +@ControllerAdvice +public class GlobalExceptionHandler extends ResponseEntityExceptionHandler { + /** + * A single place to customize the response body of all Exception types. + *
<p>
The default implementation sets the {@link WebUtils#ERROR_EXCEPTION_ATTRIBUTE} + * request attribute and creates a {@link ResponseEntity} from the given + * body, headers, and status. + * @param ex the exception + * @param body the body for the response + * @param headers the headers for the response + * @param status the response status + * @param request the current request + */ + @Override + protected ResponseEntity<Object> handleExceptionInternal(Exception ex, Object body, + HttpHeaders headers, HttpStatus status, WebRequest request) { + + if (HttpStatus.INTERNAL_SERVER_ERROR.equals(status)) { + request.setAttribute(WebUtils.ERROR_EXCEPTION_ATTRIBUTE, ex, WebRequest.SCOPE_REQUEST); + } + String error = "Internal Server Error"; + return buildResponseEntity(new RestErrorResponse(HttpStatus.INTERNAL_SERVER_ERROR, error ,ex)); + } + + /** + * Customize the response for HttpRequestMethodNotSupportedException. + *
<p>
This method logs a warning, sets the "Allow" header. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param webRequest the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity<Object> handleHttpRequestMethodNotSupported(HttpRequestMethodNotSupportedException ex, + HttpHeaders headers, HttpStatus status, WebRequest webRequest) { + pageNotFoundLogger.warn(ex.getMessage()); + + ServletWebRequest servletRequest = (ServletWebRequest) webRequest; + HttpServletRequest request = servletRequest.getNativeRequest(HttpServletRequest.class); + StringBuilder builder = new StringBuilder(); + builder.append("Request method: " + request.getMethod()+ " is not supported. Supported Methods: "); + Set<HttpMethod> supportedMethods = ex.getSupportedHttpMethods(); + supportedMethods.forEach(m -> builder.append(m).append(", ")); + + if (!CollectionUtils.isEmpty(supportedMethods)) { + headers.setAllow(supportedMethods); + } + return buildResponseEntity(new RestErrorResponse(HttpStatus.METHOD_NOT_ALLOWED, builder.substring(0, builder.length() - 2), ex)); + } + + /** + * Customize the response for HttpMediaTypeNotSupportedException. + *
<p>
This method sets the "Accept" header. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity handleHttpMediaTypeNotSupported(HttpMediaTypeNotSupportedException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + StringBuilder builder = new StringBuilder(); + builder.append(ex.getContentType()); + builder.append(" media type is not supported. Supported media types: "); + List mediaTypes = ex.getSupportedMediaTypes(); + mediaTypes.forEach(t -> builder.append(t).append(", ")); + + if (!CollectionUtils.isEmpty(mediaTypes)) { + headers.setAccept(mediaTypes); + } + + return buildResponseEntity(new RestErrorResponse(HttpStatus.UNSUPPORTED_MEDIA_TYPE, + builder.substring(0, builder.length() - 2), ex)); + } + + /** + * Customize the response for HttpMediaTypeNotAcceptableException. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity handleHttpMediaTypeNotAcceptable(HttpMediaTypeNotAcceptableException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + String error = "Media Type not Acceptable"; + return buildResponseEntity(new RestErrorResponse(HttpStatus.NOT_ACCEPTABLE, error ,ex)); + } + + /** + * Customize the response for MissingPathVariableException. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + * @since 4.2 + */ + @Override + protected ResponseEntity handleMissingPathVariable(MissingPathVariableException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + String error = "Path Variable : " + ex.getVariableName() + " is missing"; + return buildResponseEntity(new RestErrorResponse(HttpStatus.BAD_REQUEST, error, ex)); + } + + /** + * Customize the response for MissingServletRequestParameterException. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity handleMissingServletRequestParameter(MissingServletRequestParameterException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + String error = ex.getParameterName() + " parameter is missing"; + return buildResponseEntity(new RestErrorResponse(HttpStatus.BAD_REQUEST, error, ex)); + } + + /** + * Customize the response for ServletRequestBindingException. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity handleServletRequestBindingException(ServletRequestBindingException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + String error = "ServletRequest Bind Error"; + return buildResponseEntity(new RestErrorResponse(HttpStatus.BAD_REQUEST, error ,ex)); + } + + /** + * Customize the response for ConversionNotSupportedException. + *
<p>
This method delegates to {@link #handleExceptionInternal}. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return a {@code ResponseEntity} instance + */ + @Override + protected ResponseEntity<Object> handleConversionNotSupported(ConversionNotSupportedException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + return handleExceptionInternal(ex, null, headers, status, request); + } + + /** + * Customize the response for TypeMismatchException. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity<Object> handleTypeMismatch(TypeMismatchException ex, HttpHeaders headers, + HttpStatus status, WebRequest request) { + String error = "Request parameter value type mismatch error. "; + return buildResponseEntity(new RestErrorResponse(HttpStatus.BAD_REQUEST, error ,ex)); + } + + /** + * Customize the response for HttpMessageNotReadableException. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity<Object> handleHttpMessageNotReadable(HttpMessageNotReadableException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + String error = "Malformed JSON request"; + return buildResponseEntity(new RestErrorResponse(HttpStatus.BAD_REQUEST, error, ex)); + } + + /** + * Customize the response for HttpMessageNotWritableException. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity<Object> handleHttpMessageNotWritable(HttpMessageNotWritableException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + + String error = "Error writing JSON output"; + return buildResponseEntity(new RestErrorResponse(HttpStatus.INTERNAL_SERVER_ERROR, error, ex)); + } + + /** + * Customize the response for MethodArgumentNotValidException. 
+ * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity<Object> handleMethodArgumentNotValid(MethodArgumentNotValidException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + + String error = "Method Argument Validation Error."; + RestErrorResponse restErrorResponse = new RestErrorResponse(HttpStatus.BAD_REQUEST, error, ex); + restErrorResponse.addValidationErrors(ex.getBindingResult().getFieldErrors()); + restErrorResponse.addValidationError(ex.getBindingResult().getGlobalErrors()); + return buildResponseEntity(restErrorResponse); + } + + @ExceptionHandler(ConstraintViolationException.class) + public ResponseEntity<Object> handleConstraintViolation(ConstraintViolationException ex){ + String error = "Constraint Violation Error."; + RestErrorResponse restErrorResponse = new RestErrorResponse(HttpStatus.BAD_REQUEST, error, ex); + restErrorResponse.addValidationErrors(ex.getConstraintViolations()); + return buildResponseEntity(restErrorResponse); + } + + /** + * Customize the response for MissingServletRequestPartException. + *
<p>
This method delegates to {@link #handleExceptionInternal}. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return the RestErrorResponse Object + */ + @Override + protected ResponseEntity<Object> handleMissingServletRequestPart(MissingServletRequestPartException ex, + HttpHeaders headers, HttpStatus status, WebRequest request) { + + return handleExceptionInternal(ex, null, headers, status, request); + } + + /** + * Customize the response for BindException. + *
<p>
This method delegates to {@link #handleExceptionInternal}. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return a {@code ResponseEntity} instance + */ + @Override + protected ResponseEntity<Object> handleBindException(BindException ex, HttpHeaders headers, + HttpStatus status, WebRequest request) { + + return handleExceptionInternal(ex, null, headers, status, request); + } + + /** + * Customize the response for NoHandlerFoundException. + *
<p>
This method delegates to {@link #handleExceptionInternal}. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param request the current request + * @return a {@code ResponseEntity} instance + * @since 4.0 + */ + @Override + protected ResponseEntity<Object> handleNoHandlerFoundException( + NoHandlerFoundException ex, HttpHeaders headers, HttpStatus status, WebRequest request) { + + return handleExceptionInternal(ex, null, headers, status, request); + } + + /** + * Customize the response for AsyncRequestTimeoutException. + *
<p>
This method delegates to {@link #handleExceptionInternal}. + * @param ex the exception + * @param headers the headers to be written to the response + * @param status the selected response status + * @param webRequest the current request + * @return a {@code ResponseEntity} instance + * @since 4.2.8 + */ + @Override + protected ResponseEntity<Object> handleAsyncRequestTimeoutException( + AsyncRequestTimeoutException ex, HttpHeaders headers, HttpStatus status, WebRequest webRequest) { + + if (webRequest instanceof ServletWebRequest) { + ServletWebRequest servletRequest = (ServletWebRequest) webRequest; + HttpServletRequest request = servletRequest.getNativeRequest(HttpServletRequest.class); + HttpServletResponse response = servletRequest.getNativeResponse(HttpServletResponse.class); + if (response.isCommitted()) { + if (logger.isErrorEnabled()) { + logger.error("Async timeout for " + request.getMethod() + " [" + request.getRequestURI() + "]"); + } + return null; + } + } + + return handleExceptionInternal(ex, null, headers, status, webRequest); + } + + private ResponseEntity<Object> buildResponseEntity(RestErrorResponse restErrorResponse) { + return new ResponseEntity<>(restErrorResponse, restErrorResponse.getStatus()); + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/exception/KafkaExceptionHandler.java b/src/main/java/org/gnuhpc/bigdata/exception/KafkaExceptionHandler.java index a3c649a..2e1972f 100644 --- a/src/main/java/org/gnuhpc/bigdata/exception/KafkaExceptionHandler.java +++ b/src/main/java/org/gnuhpc/bigdata/exception/KafkaExceptionHandler.java @@ -14,17 +14,6 @@ @Log4j @RestControllerAdvice public class KafkaExceptionHandler { - @ExceptionHandler(Exception.class) - public RestErrorResponse handleException(Exception ex){ - RestErrorResponse.Builder builder = new RestErrorResponse.Builder(); - RestErrorResponse response = builder - .setCode(KafkaErrorCode.UNKNOWN.ordinal()) - .setMessage("Default Exception happened!") - .setDeveloperMessage(ex.getMessage()) - .setStatus(HttpStatus.SERVICE_UNAVAILABLE).build(); - return response; - } - @ExceptionHandler(ApiException.class) public RestErrorResponse kafkaApiException(ApiException ex) { RestErrorResponse.Builder responseBuilder = new RestErrorResponse.Builder(); @@ -34,43 +23,4 @@ public RestErrorResponse kafkaApiException(ApiException ex) { .setDeveloperMessage(ex.getMessage()) .build(); } - - @ExceptionHandler(RuntimeException.class) - public RestErrorResponse runtimeException(RuntimeException ex){ - RestErrorResponse.Builder responseBuilder = new RestErrorResponse.Builder(); - return responseBuilder.setStatus(HttpStatus.SERVICE_UNAVAILABLE) - .setCode(KafkaErrorCode.UNKNOWN.ordinal()) - .setMessage("Runtime Exception happened!") - .setDeveloperMessage(ex.getMessage()) - .build(); - } - - - @ExceptionHandler(ConstraintViolationException.class) - public RestErrorResponse constraintViolationException(ConstraintViolationException ex){ - StringBuilder message = new StringBuilder(); - Set<ConstraintViolation<?>> violations = ex.getConstraintViolations(); - for (ConstraintViolation<?> violation : violations) { - message.append(violation.getMessage().concat(";")); - } - - RestErrorResponse.Builder responseBuilder = new RestErrorResponse.Builder(); - return responseBuilder.setStatus(HttpStatus.SERVICE_UNAVAILABLE) - .setCode(KafkaErrorCode.UNKNOWN_TOPIC_OR_PARTITION.ordinal()) - .setMessage("Constraint Violation Exception happened!") - .setMessage(message.toString().substring(0,message.length()-1)) - .setDeveloperMessage(ex.getMessage()) - .build(); - } - - - 
@ExceptionHandler(ResourceNotFoundException.class) - public RestErrorResponse serviceNotAvailableException(ServiceNotAvailableException ex){ - RestErrorResponse.Builder responseBuilder = new RestErrorResponse.Builder(); - return responseBuilder.setStatus(HttpStatus.SERVICE_UNAVAILABLE) - .setCode(KafkaErrorCode.SERVICE_DOWN.ordinal()) - .setMessage("Service not Available happened: " + ex) - .setDeveloperMessage(ex.getMessage()) - .build(); - } } diff --git a/src/main/java/org/gnuhpc/bigdata/exception/RestErrorResponse.java b/src/main/java/org/gnuhpc/bigdata/exception/RestErrorResponse.java index f452a52..72e35bf 100644 --- a/src/main/java/org/gnuhpc/bigdata/exception/RestErrorResponse.java +++ b/src/main/java/org/gnuhpc/bigdata/exception/RestErrorResponse.java @@ -1,20 +1,58 @@ package org.gnuhpc.bigdata.exception; +import com.fasterxml.jackson.annotation.JsonFormat; +import lombok.AllArgsConstructor; import lombok.Data; +import lombok.EqualsAndHashCode; +import org.hibernate.validator.internal.engine.path.PathImpl; import org.springframework.http.HttpStatus; import org.springframework.util.ObjectUtils; +import org.springframework.validation.FieldError; +import org.springframework.validation.ObjectError; + +import javax.validation.ConstraintViolation; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; @Data public class RestErrorResponse { - private final HttpStatus status; - private final int code; - private final String message; - private final String developerMessage; - private final String moreInfoUrl; + private HttpStatus status; + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") + private LocalDateTime timestamp; + private int code; + private String message; + private String developerMessage; + private String moreInfoUrl; + private List subErrorList; + + public RestErrorResponse() { + //this.timestamp = new Date(); + this.timestamp = LocalDateTime.now(); + } + + public RestErrorResponse(HttpStatus status, String message, Throwable ex) { + this(); + this.status = status; + this.code = status.value(); + this.message = message; + this.developerMessage = ex.getLocalizedMessage(); + } + + public RestErrorResponse(HttpStatus status, String message, String moreInfoUrl, Throwable ex) { + this(); + this.status = status; + this.code = status.value(); + this.message = message; + this.developerMessage = ex.getLocalizedMessage(); + this.moreInfoUrl = moreInfoUrl; + } public RestErrorResponse(HttpStatus status, int code, String message, String developerMessage, String moreInfoUrl) { + this(); if (status == null) { throw new NullPointerException("HttpStatus argument cannot be null."); } @@ -25,7 +63,6 @@ public RestErrorResponse(HttpStatus status, int code, String message, String dev this.moreInfoUrl = moreInfoUrl; } - @Override public boolean equals(Object o) { if (this == o) { @@ -58,8 +95,60 @@ public String toString() { .toString(); } - public static class Builder { + private void addSubError(RestSubError subError) { + if (subErrorList == null) { + subErrorList = new ArrayList<>(); + } + subErrorList.add(subError); + } + + private void addValidationError(String object, String field, Object rejectedValue, String message) { + addSubError(new RestValidationError(object, field, rejectedValue, message)); + } + + private void addValidationError(String object, String message) { + addSubError(new RestValidationError(object, message)); + } + + private void addValidationError(FieldError fieldError) { + 
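For context on how these sub-errors get populated: a handler in the same package can funnel bean-validation failures into a RestErrorResponse through the package-private addValidationErrors(...) methods below. A minimal, hypothetical sketch (this advice class is not part of the patch, and the mapping to HttpStatus.BAD_REQUEST is an assumption):

```java
package org.gnuhpc.bigdata.exception;

import javax.validation.ConstraintViolationException;
import org.springframework.http.HttpStatus;
import org.springframework.web.bind.annotation.ExceptionHandler;
import org.springframework.web.bind.annotation.RestControllerAdvice;

// Hypothetical advice: each ConstraintViolation becomes one
// RestValidationError sub-error on the response body.
@RestControllerAdvice
public class ValidationExceptionAdvice {

  @ExceptionHandler(ConstraintViolationException.class)
  public RestErrorResponse onConstraintViolation(ConstraintViolationException ex) {
    RestErrorResponse response =
        new RestErrorResponse(HttpStatus.BAD_REQUEST, "Validation failed.", ex);
    response.addValidationErrors(ex.getConstraintViolations());
    return response;
  }
}
```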
this.addValidationError( + fieldError.getObjectName(), + fieldError.getField(), + fieldError.getRejectedValue(), + fieldError.getDefaultMessage()); + } + void addValidationErrors(List<FieldError> fieldErrors) { + fieldErrors.forEach(this::addValidationError); + } + + private void addValidationError(ObjectError objectError) { + this.addValidationError( + objectError.getObjectName(), + objectError.getDefaultMessage()); + } + + void addValidationError(List<ObjectError> globalErrors) { + globalErrors.forEach(this::addValidationError); + } + + /** + * Utility method for adding an error from a ConstraintViolation. Usually used when a @Validated validation fails. + * @param cv the ConstraintViolation + */ + private void addValidationError(ConstraintViolation<?> cv) { + this.addValidationError( + cv.getRootBeanClass().getSimpleName(), + ((PathImpl) cv.getPropertyPath()).getLeafNode().asString(), + cv.getInvalidValue(), + cv.getMessage()); + } + + void addValidationErrors(Set<ConstraintViolation<?>> constraintViolations) { + constraintViolations.forEach(this::addValidationError); + } + + public static class Builder { private HttpStatus status; private int code; private String message; @@ -107,4 +196,23 @@ public RestErrorResponse build() { return new RestErrorResponse(this.status, this.code, this.message, this.developerMessage, this.moreInfoUrl); } } + + abstract class RestSubError { + + } + + @Data + @EqualsAndHashCode(callSuper = false) + @AllArgsConstructor + class RestValidationError extends RestSubError { + private String object; + private String field; + private Object rejectedValue; + private String message; + + RestValidationError(String object, String message) { + this.object = object; + this.message = message; + } + } } diff --git a/src/main/java/org/gnuhpc/bigdata/model/HealthCheckResult.java b/src/main/java/org/gnuhpc/bigdata/model/HealthCheckResult.java new file mode 100644 index 0000000..910f7bd --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/HealthCheckResult.java @@ -0,0 +1,20 @@ +package org.gnuhpc.bigdata.model; + +import com.fasterxml.jackson.annotation.JsonFormat; +import lombok.Getter; +import lombok.Setter; + +import java.time.LocalDateTime; + +@Getter +@Setter +public class HealthCheckResult { + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") + private LocalDateTime timestamp; + public String status; + public String msg; + + public HealthCheckResult() { + this.timestamp = LocalDateTime.now(); + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXAttribute.java b/src/main/java/org/gnuhpc/bigdata/model/JMXAttribute.java new file mode 100644 index 0000000..3419edc --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXAttribute.java @@ -0,0 +1,330 @@ +package: org.gnuhpc.bigdata.model; + +import lombok.Getter; +import lombok.Setter; + +import javax.management.*; +import java.io.IOException; +import java.util.*; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.concurrent.atomic.AtomicLong; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +@Getter +@Setter +public abstract class JMXAttribute { + private MBeanAttributeInfo attribute; + private ObjectName beanName; + private MBeanServerConnection connection; + private String attributeName; + private String beanStringName; + private String domain; + private HashMap<String, String> beanParameters; + private JMXConfiguration matchingConf; + private LinkedHashMap<String, LinkedHashMap<Object, Object>> valueConversions = new LinkedHashMap<>(); + private static final List<String> EXCLUDED_BEAN_PARAMS = Arrays.asList("domain", "domain_regex", "bean_name", "bean", +
"bean_regex", "attribute", "exclude_tags", "tags"); + protected static final String METRIC_TYPE = "metric_type"; + protected static final String ALIAS = "alias"; + private static final String FIRST_CAP_PATTERN = "(.)([A-Z][a-z]+)"; + private static final String ALL_CAP_PATTERN = "([a-z0-9])([A-Z])"; + private static final String METRIC_REPLACEMENT = "([^a-zA-Z0-9_.]+)|(^[^a-zA-Z]+)"; + private static final String DOT_UNDERSCORE = "_*\\._*"; + + public JMXAttribute(MBeanAttributeInfo attribute, ObjectName beanName, MBeanServerConnection connection) { + this.attribute = attribute; + this.attributeName = attribute.getName(); + this.beanName = beanName; + this.beanStringName = beanName.toString(); + this.connection = connection; + // A bean name is formatted like that: org.apache.cassandra.db:type=Caches,keyspace=system,cache=HintsColumnFamilyKeyCache + // i.e. : domain:bean_parameter1,bean_parameter2 + //Note: some beans have a ':' in the name. Example: some.domain:name="some.bean.0.0.0.0:80.some-metric" + int splitPosition = beanStringName.indexOf(':'); + String domain = beanStringName.substring(0, splitPosition); + String beanParametersString = beanStringName.substring(splitPosition+1); + this.domain = domain; + this.matchingConf = null; + + HashMap beanParametersHash = getBeanParametersHash(beanParametersString); + //LinkedList beanParametersList = getBeanParametersList(instanceName, beanParametersHash, instanceTags); + this.beanParameters = beanParametersHash; + } + + + public abstract LinkedList> getMetrics() throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, ReflectionException, IOException; + + /** + * An abstract function implemented in the inherited classes JMXSimpleAttribute and JMXComplexAttribute + * + * @param conf Configuration a Configuration object that will be used to check if the JMX Attribute match this configuration + * @return a boolean that tells if the attribute matches the configuration or not + */ + public abstract boolean match(JMXConfiguration conf); + + public static HashMap getBeanParametersHash(String beanParametersString) { + String[] beanParameters = beanParametersString.split(","); + HashMap beanParamsMap = new HashMap(beanParameters.length); + for (String param : beanParameters) { + String[] paramSplit = param.split("="); + if (paramSplit.length > 1) { + beanParamsMap.put(new String(paramSplit[0]), new String(paramSplit[1])); + } else { + beanParamsMap.put(new String(paramSplit[0]), ""); + } + } + + return beanParamsMap; + } + + boolean matchDomain(JMXConfiguration conf) { + String includeDomain = conf.getInclude().getDomain(); + Pattern includeDomainRegex = conf.getInclude().getDomainRegex(); + + return (includeDomain == null || includeDomain.equals(this.getDomain())) + && (includeDomainRegex == null || includeDomainRegex.matcher(this.getDomain()).matches()); + } + + boolean matchBean(JMXConfiguration configuration) { + return matchBeanName(configuration) && matchBeanRegex(configuration.getInclude(), true); + } + + private boolean matchBeanName(JMXConfiguration configuration) { + JMXFilter include = configuration.getInclude(); + + if (!include.isEmptyBeanName() && !include.getBeanNames().contains(this.getBeanStringName())) { + return false; + } + + for (String bean_attr : include.keySet()) { + if (EXCLUDED_BEAN_PARAMS.contains(bean_attr)) { + continue; + } + + ArrayList beanValues = include.getParameterValues(bean_attr); + + if (beanParameters.get(bean_attr) == null || !(beanValues.contains(beanParameters.get(bean_attr)))){ + 
return false; + } + } + return true; + } + + private boolean matchBeanRegex(JMXFilter filter, boolean matchIfNoRegex) { + if (filter == null) return matchIfNoRegex; + ArrayList beanRegexes = filter.getBeanRegexes(); + if (beanRegexes.isEmpty()) { + return matchIfNoRegex; + } + + for (Pattern beanRegex : beanRegexes) { + Matcher m = beanRegex.matcher(beanStringName); + + if(m.matches()) { + for (int i = 0; i<= m.groupCount(); i++) { + this.beanParameters.put(Integer.toString(i), m.group(i)); + } + return true; + } + } + return false; + } + + boolean excludeMatchDomain(JMXConfiguration conf) { + if (conf.getExclude() == null) return false; + String excludeDomain = conf.getExclude().getDomain(); + Pattern excludeDomainRegex = conf.getExclude().getDomainRegex(); + + return excludeDomain != null && excludeDomain.equals(domain) + || excludeDomainRegex != null && excludeDomainRegex.matcher(domain).matches(); + } + + boolean excludeMatchBean(JMXConfiguration configuration) { + return excludeMatchBeanName(configuration) || matchBeanRegex(configuration.getExclude(), false); + } + + private boolean excludeMatchBeanName(JMXConfiguration conf) { + JMXFilter exclude = conf.getExclude(); + if (exclude == null) return false; + ArrayList beanNames = exclude.getBeanNames(); + + if(beanNames.contains(beanStringName)){ + return true; + } + + for (String bean_attr : exclude.keySet()) { + if (EXCLUDED_BEAN_PARAMS.contains(bean_attr)) { + continue; + } + + if (beanParameters.get(bean_attr) == null) { + continue; + } + + ArrayList beanValues = exclude.getParameterValues(bean_attr); + for (String beanVal : beanValues) { + if (beanParameters.get(bean_attr).equals(beanVal)) { + return true; + } + } + } + return false; + } + + Object getJmxValue() throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, ReflectionException, IOException { + return this.getConnection().getAttribute(this.getBeanName(), this.getAttribute().getName()); + } + + public static List getExcludedBeanParams(){ + return EXCLUDED_BEAN_PARAMS; + } + + double castToDouble(Object metricValue, String field) { + Object value = convertMetricValue(metricValue, field); + + if (value instanceof String) { + return Double.parseDouble((String) value); + } else if (value instanceof Integer) { + return new Double((Integer) (value)); + } else if (value instanceof AtomicInteger) { + return new Double(((AtomicInteger) (value)).get()); + } else if (value instanceof AtomicLong) { + Long l = ((AtomicLong) (value)).get(); + return l.doubleValue(); + } else if (value instanceof Double) { + return (Double) value; + } else if (value instanceof Boolean) { + return ((Boolean) value ? 1.0 : 0.0); + } else if (value instanceof Long) { + Long l = new Long((Long) value); + return l.doubleValue(); + } else if (value instanceof Number) { + return ((Number) value).doubleValue(); + } else { + try { + return new Double((Double) value); + } catch (Exception e) { + throw new NumberFormatException(); + } + } + } + + Object convertMetricValue(Object metricValue, String field) { + Object converted = metricValue; + + if (!getValueConversions(field).isEmpty()) { + converted = getValueConversions(field).get(metricValue); + if (converted == null && getValueConversions(field).get("default") != null) { + converted = getValueConversions(field).get("default"); + } + } + + return converted; + } + + @SuppressWarnings("unchecked") + HashMap getValueConversions(String field) { + String fullAttributeName =(field!=null)?(getAttribute().getName() + "." 
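castToDouble above coerces the grab-bag of value types JMX can return into a double. A condensed standalone version of those conversions (the explicit Integer/Long/Double branches collapse into the Number case):

```java
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;

// Condensed mirror of JMXAttribute.castToDouble's type coercions.
public class CastToDoubleDemo {
  static double coerce(Object value) {
    if (value instanceof String) return Double.parseDouble((String) value);
    if (value instanceof AtomicInteger) return ((AtomicInteger) value).get();
    if (value instanceof AtomicLong) return ((AtomicLong) value).get();
    if (value instanceof Boolean) return ((Boolean) value) ? 1.0 : 0.0;
    if (value instanceof Number) return ((Number) value).doubleValue();
    throw new NumberFormatException("Cannot convert: " + value);
  }

  public static void main(String[] args) {
    System.out.println(coerce("42.5"));            // 42.5
    System.out.println(coerce(Boolean.TRUE));      // 1.0
    System.out.println(coerce(new AtomicLong(7))); // 7.0
  }
}
```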
+ field):(getAttribute().getName()); + if (valueConversions.get(fullAttributeName) == null) { + Object includedAttribute = matchingConf.getInclude().getAttribute(); + if (includedAttribute instanceof LinkedHashMap) { + LinkedHashMap> attribute = + ((LinkedHashMap>>) includedAttribute).get(fullAttributeName); + + if (attribute != null) { + valueConversions.put(fullAttributeName, attribute.get("values")); + } + } + if (valueConversions.get(fullAttributeName) == null) { + valueConversions.put(fullAttributeName, new LinkedHashMap()); + } + } + + return valueConversions.get(fullAttributeName); + } + + /** + * Overload `getAlias` method. + * + * Note: used for `JMXSimpleAttribute` only, as `field` is null. + */ + protected String getAlias(){ + return getAlias(null); + } + + /** + * Get attribute alias. + * + * In order, tries to: + * * Use `alias_match` to generate an alias with a regular expression + * * Use `alias` directly + * * Create an generic alias prefixed with user's `metric_prefix` preference or default to `jmx` + * + * Argument(s): + * * (Optional) `field` + * `Null` for `JMXSimpleAttribute`. + */ + protected String getAlias(String field) { + String alias = null; + + JMXFilter include = getMatchingConf().getInclude(); + + String fullAttributeName =(field!=null)?(getAttribute().getName() + "." + field):(getAttribute().getName()); + + if (include.getAttribute() instanceof LinkedHashMap) { + LinkedHashMap> attribute = (LinkedHashMap>) (include.getAttribute()); + alias = getUserAlias(attribute, fullAttributeName); + } + + //If still null - generate generic alias + if (alias == null) { + alias = "jmx." + getDomain() + "." + fullAttributeName; + } + + return alias; + } + + /** + * Retrieve user defined alias. Substitute regular expression named groups. 
+ * + * Example: + * ``` + * bean: org.datadog.jmxfetch.test:foo=Bar,qux=Baz + * attribute: + * toto: + * alias: my.metric.$foo.$attribute + * ``` + * returns a metric name `my.metric.bar.toto` + */ + private String getUserAlias(LinkedHashMap> attribute, String fullAttributeName){ + String alias = attribute.get(fullAttributeName).get(ALIAS); + if (alias == null) { + return null; + } + + alias = this.replaceByAlias(alias); + + // Attribute & domain + alias = alias.replace("$attribute", fullAttributeName); + alias = alias.replace("$domain", domain); + + return alias; + } + + private String replaceByAlias(String alias){ + // Bean parameters + for (Map.Entry param : beanParameters.entrySet()) { + alias = alias.replace("$" + param.getKey(), param.getValue()); + } + return alias; + } + + static String convertMetricName(String metricName) { + metricName = metricName.replaceAll(FIRST_CAP_PATTERN, "$1_$2"); + metricName = metricName.replaceAll(ALL_CAP_PATTERN, "$1_$2").toLowerCase(); + metricName = metricName.replaceAll(METRIC_REPLACEMENT, "_"); + metricName = metricName.replaceAll(DOT_UNDERSCORE, ".").trim(); + return metricName; + } +} \ No newline at end of file diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXClient.java b/src/main/java/org/gnuhpc/bigdata/model/JMXClient.java new file mode 100644 index 0000000..2c30e94 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXClient.java @@ -0,0 +1,128 @@ +package org.gnuhpc.bigdata.model; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j; +import org.gnuhpc.bigdata.config.JMXConfig; +import org.gnuhpc.bigdata.exception.CollectorException; + +import javax.management.remote.JMXConnector; +import javax.management.remote.JMXConnectorFactory; +import javax.management.remote.JMXServiceURL; +import java.io.IOException; +import java.net.MalformedURLException; +import java.net.SocketTimeoutException; +import java.util.HashMap; +import java.util.Map; +import java.util.concurrent.*; + +import static com.google.common.base.Preconditions.checkNotNull; + +@Getter +@Setter +@Log4j +public class JMXClient { + private String ip; + private String port; + private JMXConnector jmxConnector = null; + private static final ThreadFactory daemonThreadFactory = new DaemonThreadFactory(); + private String jmxServiceURL; + private Map jmxEnv; + private static final long CONNECTION_TIMEOUT = 10000; + private static final long JMX_TIMEOUT = 20; + + public JMXClient() { + jmxEnv = new HashMap<>(); + jmxEnv.put(JMXConfig.JMX_CONNECT_TIMEOUT, CONNECTION_TIMEOUT); + } + + public JMXClient(String host) { + this(); + String[] ipAndPort = host.split(":"); + this.ip = ipAndPort[0]; + this.port = ipAndPort[1]; + this.jmxServiceURL = new StringBuilder().append(JMXConfig.JMX_PROTOCOL) + .append(this.ip) + .append(":") + .append(this.port) + .append("/jmxrmi").toString(); + } + + public JMXConnector connect() throws CollectorException { + try { + JMXServiceURL jmxServiceURL = new JMXServiceURL(this.jmxServiceURL); + jmxConnector = JMXConnectorFactory.connect(jmxServiceURL, jmxEnv); + } catch (MalformedURLException e) { + throw new CollectorException(String.format("%s occurred. URL: %s. Reason: %s", + e.getClass().getCanonicalName(), this.jmxServiceURL, e.getCause()), e); + } catch (IOException e) { + throw new CollectorException(String.format("%s occurred. URL: %s. Reason: %s", + e.getClass().getCanonicalName(), this.jmxServiceURL, e.getCause()), e); + } + return jmxConnector; + } + + /** + * This code comes from Datadog jmxFetch. 
+ * https://github.com/DataDog/jmxfetch/blob/master/src/main/java/org/datadog/jmxfetch/Connection.java + */ + public JMXConnector connectWithTimeout() throws IOException, InterruptedException { + JMXServiceURL url = new JMXServiceURL(this.jmxServiceURL); + + BlockingQueue mailbox = new ArrayBlockingQueue(1); + + ExecutorService executor = Executors.newSingleThreadExecutor(daemonThreadFactory); + executor.submit(() -> { + try { + JMXConnector connector = JMXConnectorFactory.connect(url, jmxEnv); + if (!mailbox.offer(connector)) { + connector.close(); + } + } catch (Throwable t) { + mailbox.offer(t); + } + }); + Object result; + try { + result = mailbox.poll(JMX_TIMEOUT, TimeUnit.SECONDS); + if (result == null) { + if (!mailbox.offer("")) + result = mailbox.take(); + } + } catch (InterruptedException e) { + throw e; + } finally { + executor.shutdown(); + } + if (result == null) { + log.warn("Connection timed out: " + url); + throw new SocketTimeoutException("Connection timed out: " + url); + } + if (result instanceof JMXConnector) { + jmxConnector = (JMXConnector) result; + return jmxConnector; + } + try { + throw (Throwable) result; + } catch (Throwable e) { + throw new IOException(e.toString(), e); + } + } + + public void close() throws CollectorException { + checkNotNull(jmxConnector); + try { + jmxConnector.close(); + } catch (IOException e) { + throw new CollectorException("Cannot close connection. ", e); + } + } + + private static class DaemonThreadFactory implements ThreadFactory { + public Thread newThread(Runnable r) { + Thread t = Executors.defaultThreadFactory().newThread(r); + t.setDaemon(true); + return t; + } + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXComplexAttribute.java b/src/main/java/org/gnuhpc/bigdata/model/JMXComplexAttribute.java new file mode 100644 index 0000000..6107797 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXComplexAttribute.java @@ -0,0 +1,167 @@ +package org.gnuhpc.bigdata.model; + +import javax.management.*; +import javax.management.openmbean.CompositeData; +import java.io.IOException; +import java.util.*; + +public class JMXComplexAttribute extends JMXAttribute { + private HashMap> subAttributeList; + + public JMXComplexAttribute(MBeanAttributeInfo attribute, ObjectName beanName, MBeanServerConnection connection) { + super(attribute, beanName, connection); + this.subAttributeList = new HashMap<>(); + } + + @Override + public LinkedList> getMetrics() + throws AttributeNotFoundException, InstanceNotFoundException, + MBeanException, ReflectionException, IOException { + + LinkedList> metrics = new LinkedList>(); + + for (Map.Entry> pair : subAttributeList.entrySet()) { + String subAttribute = pair.getKey(); + HashMap metric = pair.getValue(); + if (metric.get(ALIAS) == null) { + metric.put(ALIAS, convertMetricName(getAlias(subAttribute))); + } + if (metric.get(METRIC_TYPE) == null) { + metric.put("domain", getBeanName().getDomain()); + metric.put("beanName", getBeanName().toString()); + metric.put("attributeName", subAttribute); + metric.put(METRIC_TYPE, getMetricType(subAttribute)); + } + + /* + if (metric.get("tags") == null) { + metric.put("tags", getTags()); + } + */ + + metric.put("value", castToDouble(getValue(subAttribute), subAttribute)); + metrics.add(metric); + + } + return metrics; + + } + + private Object getMetricType(String subAttribute) { + String subAttributeName = getAttribute().getName() + "." 
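The connect-with-timeout pattern above (attempt the connection on a daemon thread, hand the result back through a one-slot queue) is worth seeing in isolation. A minimal sketch against a placeholder endpoint; host, port, and the 20-second deadline are arbitrary here, and the cleanup of a late-arriving connector is omitted:

```java
import java.util.concurrent.*;
import javax.management.remote.JMXConnector;
import javax.management.remote.JMXConnectorFactory;
import javax.management.remote.JMXServiceURL;

// Sketch of JMXClient.connectWithTimeout's approach: never let a hung
// JMX connect block the caller, or keep the JVM alive, indefinitely.
public class ConnectWithTimeoutDemo {
  public static void main(String[] args) throws Exception {
    JMXServiceURL url = new JMXServiceURL(
        "service:jmx:rmi:///jndi/rmi://localhost:9999/jmxrmi"); // placeholder

    BlockingQueue<Object> mailbox = new ArrayBlockingQueue<>(1);
    ExecutorService executor = Executors.newSingleThreadExecutor(r -> {
      Thread t = new Thread(r);
      t.setDaemon(true);
      return t;
    });
    executor.submit(() -> {
      try {
        mailbox.offer(JMXConnectorFactory.connect(url));
      } catch (Throwable t) {
        mailbox.offer(t);
      }
    });

    Object result = mailbox.poll(20, TimeUnit.SECONDS);
    executor.shutdown();
    if (result instanceof JMXConnector) {
      System.out.println("connected");
      ((JMXConnector) result).close();
    } else {
      System.out.println("timed out or failed: " + result);
    }
  }
}
```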
+ subAttribute; + String metricType = null; + + JMXFilter include = getMatchingConf().getInclude(); + if (include.getAttribute() instanceof LinkedHashMap) { + LinkedHashMap> attribute = (LinkedHashMap>) (include.getAttribute()); + metricType = attribute.get(subAttributeName).get(METRIC_TYPE); + if (metricType == null) { + metricType = attribute.get(subAttributeName).get("type"); + } + } + + if (metricType == null) { + metricType = "gauge"; + } + + return metricType; + } + + private Object getValue(String subAttribute) throws AttributeNotFoundException, InstanceNotFoundException, + MBeanException, ReflectionException, IOException { + + Object value = this.getJmxValue(); + String attributeType = getAttribute().getType(); + + if ("javax.management.openmbean.CompositeData".equals(attributeType)) { + CompositeData data = (CompositeData) value; + return data.get(subAttribute); + } else if (("java.util.HashMap".equals(attributeType)) || ("java.util.Map".equals(attributeType))) { + Map data = (Map) value; + return data.get(subAttribute); + } + throw new NumberFormatException(); + } + + @Override + public boolean match(JMXConfiguration configuration) { + if (!matchDomain(configuration) + || !matchBean(configuration) + || excludeMatchDomain(configuration) + || excludeMatchBean(configuration)) { + return false; + } + + try { + populateSubAttributeList(getJmxValue()); + } catch (Exception e) { + return false; + } + + return matchAttribute(configuration) && !excludeMatchAttribute(configuration); + } + + private void populateSubAttributeList(Object attributeValue) { + String attributeType = getAttribute().getType(); + if ("javax.management.openmbean.CompositeData".equals(attributeType)) { + CompositeData data = (CompositeData) attributeValue; + for (String key : data.getCompositeType().keySet()) { + this.subAttributeList.put(key, new HashMap()); + } + } else if (("java.util.HashMap".equals(attributeType)) || ("java.util.Map".equals(attributeType))) { + Map data = (Map) attributeValue; + for (String key : data.keySet()) { + this.subAttributeList.put(key, new HashMap()); + } + } + } + + private boolean excludeMatchAttribute(JMXConfiguration configuration) { + JMXFilter exclude = configuration.getExclude(); + if (exclude == null) return false; + if (exclude.getAttribute() != null && matchSubAttribute(exclude, getAttributeName(), false)) { + return true; + } + + Iterator it = subAttributeList.keySet().iterator(); + while (it.hasNext()) { + String subAttribute = it.next(); + if (matchSubAttribute(exclude, getAttributeName() + "." + subAttribute, false)) { + it.remove(); + } + } + return subAttributeList.size() <= 0; + } + + private boolean matchAttribute(JMXConfiguration configuration) { + if (matchSubAttribute(configuration.getInclude(), getAttributeName(), true)) { + return true; + } + + Iterator it = subAttributeList.keySet().iterator(); + + while (it.hasNext()) { + String subAttribute = it.next(); + if (!matchSubAttribute(configuration.getInclude(), getAttributeName() + "." 
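JMXComplexAttribute's getValue and populateSubAttributeList walk the keys of a CompositeData value. A self-contained illustration against a standard platform MBean whose attribute has exactly that shape (java.lang:type=Memory, attribute HeapMemoryUsage):

```java
import java.lang.management.ManagementFactory;
import javax.management.MBeanServer;
import javax.management.ObjectName;
import javax.management.openmbean.CompositeData;

// Reads each sub-attribute of a composite JMX value, the same shape
// JMXComplexAttribute flattens into "<attribute>.<key>" metrics.
public class CompositeDataDemo {
  public static void main(String[] args) throws Exception {
    MBeanServer server = ManagementFactory.getPlatformMBeanServer();
    ObjectName bean = new ObjectName("java.lang:type=Memory");
    CompositeData heap = (CompositeData) server.getAttribute(bean, "HeapMemoryUsage");
    for (String key : heap.getCompositeType().keySet()) { // committed, init, max, used
      System.out.println("HeapMemoryUsage." + key + " = " + heap.get(key));
    }
  }
}
```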
+ subAttribute, true)) { + it.remove(); + } + } + + return subAttributeList.size() > 0; + } + + private boolean matchSubAttribute(JMXFilter params, String subAttributeName, boolean matchOnEmpty) { + if ((params.getAttribute() instanceof LinkedHashMap) + && ((LinkedHashMap) (params.getAttribute())).containsKey(subAttributeName)) { + return true; + } else if ((params.getAttribute() instanceof ArrayList + && ((ArrayList) (params.getAttribute())).contains(subAttributeName))) { + return true; + } else if (params.getAttribute() == null) { + return matchOnEmpty; + } + return false; + + } + +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXConfiguration.java b/src/main/java/org/gnuhpc/bigdata/model/JMXConfiguration.java new file mode 100644 index 0000000..ee47f14 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXConfiguration.java @@ -0,0 +1,226 @@ +package org.gnuhpc.bigdata.model; + +import com.fasterxml.jackson.annotation.JsonProperty; +import lombok.Getter; +import lombok.Setter; +import com.fasterxml.jackson.annotation.JsonCreator; + +import java.util.*; + +@Getter +@Setter +public class JMXConfiguration { + private JMXFilter include; + private JMXFilter exclude; + + /** + * Access JMXConfiguration elements more easily + * + * Also provides helper methods to extract common information among JMXFilters. + */ + @JsonCreator + public JMXConfiguration(@JsonProperty("include") JMXFilter include, @JsonProperty("exclude") JMXFilter exclude) { + this.include = include; + this.exclude = exclude; + } + + private Boolean hasInclude(){ + return getInclude() != null; + } + + /** + * JMXFilter a configuration list to keep the ones with `include` JMXFilters. + * + * @param configurationList the configuration list to JMXFilter + * + * @return a configuration list + */ + private static LinkedList getIncludeConfigurationList(LinkedList configurationList){ + LinkedList includeConfigList = new LinkedList(configurationList); + Iterator confItr = includeConfigList.iterator(); + + while(confItr.hasNext()) { + JMXConfiguration conf = confItr.next(); + if (!conf.hasInclude()) { + confItr.remove(); + } + } + return includeConfigList; + } + + /** + * Extract `include` JMXFilters from the configuration list and index then by domain name. + * + * @param configurationList the configuration list to process + * + * @return JMXFilters by domain name + */ + private static HashMap> getIncludeJMXFiltersByDomain(LinkedList configurationList){ + HashMap> includeJMXFiltersByDomain = new HashMap>(); + + for (JMXConfiguration conf : configurationList) { + JMXFilter JMXFilter = conf.getInclude(); + LinkedList JMXFilters = new LinkedList(); + + // Convert bean name, to a proper JMXFilter, i.e. 
a hash + if (!JMXFilter.isEmptyBeanName()) { + ArrayList beanNames = JMXFilter.getBeanNames(); + + for (String beanName : beanNames) { + String[] splitBeanName = beanName.split(":"); + String domain = splitBeanName[0]; + String rawBeanParameters = splitBeanName[1]; + HashMap beanParametersHash = JMXAttribute.getBeanParametersHash(rawBeanParameters); + beanParametersHash.put("domain", domain); + JMXFilters.add(new JMXFilter(beanParametersHash)); + } + } else { + JMXFilters.add(JMXFilter); + } + + for (JMXFilter f: JMXFilters) { + // Retrieve the existing JMXFilters for the domain, add the new JMXFilters + LinkedList domainJMXFilters; + String domainName = f.getDomain(); + + if (includeJMXFiltersByDomain.containsKey(domainName)) { + domainJMXFilters = includeJMXFiltersByDomain.get(domainName); + } else { + domainJMXFilters = new LinkedList(); + } + + domainJMXFilters.add(f); + includeJMXFiltersByDomain.put(domainName, domainJMXFilters); + } + } + return includeJMXFiltersByDomain; + } + + /** + * Extract, among JMXFilters, bean key parameters in common. + * + * @param JMXFiltersByDomain JMXFilters by domain name + * + * @return common bean key parameters by domain name + */ + private static HashMap> getCommonBeanKeysByDomain(HashMap> JMXFiltersByDomain){ + HashMap> beanKeysIntersectionByDomain = new HashMap>(); + + for (Map.Entry> JMXFiltersEntry : JMXFiltersByDomain.entrySet()) { + String domainName = JMXFiltersEntry.getKey(); + LinkedList mJMXFilters= JMXFiltersEntry.getValue(); + + // Compute keys intersection + Set keysIntersection = new HashSet(mJMXFilters.getFirst().keySet()); + + for (JMXFilter f: mJMXFilters) { + keysIntersection.retainAll(f.keySet()); + } + + // Remove special parameters + for(String param : JMXAttribute.getExcludedBeanParams()){ + keysIntersection.remove(param); + } + beanKeysIntersectionByDomain.put(domainName, keysIntersection); + } + + return beanKeysIntersectionByDomain; + } + + /** + * Build a map of common bean keys->values, with the specified bean keys, among the given JMXFilters. + * + * @param beanKeysByDomain bean keys by domain name + * @param JMXFiltersByDomain JMXFilters by domain name + * + * @return bean pattern (keys->values) by domain name + */ + private static HashMap> getCommonScopeByDomain(HashMap> beanKeysByDomain, HashMap> JMXFiltersByDomain){ + // Compute a common scope a among JMXFilters by domain name + HashMap> commonScopeByDomain = new HashMap>(); + + for (Map.Entry> commonParametersByDomainEntry : beanKeysByDomain.entrySet()) { + String domainName = commonParametersByDomainEntry.getKey(); + Set commonParameters = commonParametersByDomainEntry.getValue(); + LinkedList JMXFilters = JMXFiltersByDomain.get(domainName); + LinkedHashMap commonScope = new LinkedHashMap(); + + for (String parameter : commonParameters) { + // Check if all values associated with the parameters are the same + String commonValue = null; + Boolean hasCommonValue = true; + + for (JMXFilter f : JMXFilters) { + ArrayList parameterValues = f.getParameterValues(parameter); + + if (parameterValues.size() != 1 || (commonValue != null && !commonValue.equals(parameterValues.get(0)))) { + hasCommonValue = false; + break; + } + commonValue = parameterValues.get(0); + + } + if (hasCommonValue) { + commonScope.put(parameter, commonValue); + } + } + commonScopeByDomain.put(domainName, commonScope); + } + + return commonScopeByDomain; + } + + /** + * Stringify a bean pattern. 
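Concretely, the scope string assembled below is an ObjectName query pattern. For the BrokerTopicMetrics include filters from the template at the top of this patch, which share only the `type` parameter, the common scope comes out as follows (a hand-worked sketch of beanScopeToString's output):

```java
import java.util.LinkedHashMap;
import java.util.Map;

// Illustrative: the pattern built from bean parameters shared by all
// include filters of one domain.
public class BeanScopeDemo {
  public static void main(String[] args) {
    LinkedHashMap<String, String> beanScope = new LinkedHashMap<>();
    beanScope.put("type", "BrokerTopicMetrics"); // the only shared key/value

    StringBuilder result = new StringBuilder("kafka.server:");
    for (Map.Entry<String, String> e : beanScope.entrySet()) {
      result.append(e.getKey()).append("=").append(e.getValue()).append(",");
    }
    result.append("*");
    System.out.println(result); // kafka.server:type=BrokerTopicMetrics,*
  }
}
```

Querying MBeans with that single pattern replaces one queryNames call per filter.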
+ * + * @param domain domain name + * @param beanScope map of bean keys-> values + * + * @return string pattern identifying the bean scope + */ + private static String beanScopeToString(String domain, LinkedHashMap beanScope){ + String result = ""; + + // Domain + domain = (domain != null) ? domain : "*"; + result += domain + ":"; + + // Scope parameters + for (Map.Entry beanScopeEntry : beanScope.entrySet()) { + String param = beanScopeEntry.getKey(); + String value = beanScopeEntry.getValue(); + + result += param + "=" + value + ","; + } + result += "*"; + + return result; + } + + /** + * Find, among the JMXConfiguration list, a potential common bean pattern by domain name. + * + * @param JMXConfigurationList the JMXConfiguration list to process + * + * @return common bean pattern strings + */ + public static LinkedList getGreatestCommonScopes(LinkedList JMXConfigurationList){ + LinkedList result = new LinkedList(); + if (JMXConfigurationList == null || JMXConfigurationList.isEmpty()) { + return result; + } + LinkedList includeConfigList = getIncludeConfigurationList(JMXConfigurationList); + HashMap> includeJMXFiltersByDomain = getIncludeJMXFiltersByDomain(includeConfigList); + HashMap> parametersIntersectionByDomain = getCommonBeanKeysByDomain(includeJMXFiltersByDomain); + HashMap> commonBeanScopeByDomain = getCommonScopeByDomain(parametersIntersectionByDomain, includeJMXFiltersByDomain); + + for (Map.Entry> beanScopeEntry: commonBeanScopeByDomain.entrySet()) { + String domain = beanScopeEntry.getKey(); + LinkedHashMap beanScope = beanScopeEntry.getValue(); + + result.add(beanScopeToString(domain, beanScope)); + } + + return result; + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXFilter.java b/src/main/java/org/gnuhpc/bigdata/model/JMXFilter.java new file mode 100644 index 0000000..79c7e25 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXFilter.java @@ -0,0 +1,180 @@ +package org.gnuhpc.bigdata.model; + +import com.fasterxml.jackson.annotation.JsonCreator; +import lombok.Getter; +import lombok.Setter; + +import java.util.*; +import java.util.regex.Pattern; + +@Getter +@Setter +public class JMXFilter { + HashMap filter; + Pattern domainRegex; + ArrayList beanRegexes = null; + //ArrayList excludeTags = null; + //HashMap additionalTags = null; + + /** + * A simple class to manipulate include/exclude filter elements more easily + * A filter may contain: + * - A domain (key: 'domain') or a domain regex (key: 'domain_regex') + * - Bean names (key: 'bean' or 'bean_name') or bean regexes (key: 'bean_regex') + * - Attributes (key: 'attribute') + * - Additional bean parameters (other keys) + */ + @JsonCreator + @SuppressWarnings("unchecked") + public JMXFilter(Object filter) { + HashMap castFilter; + if (filter != null) { + castFilter = (HashMap) filter; + } else { + castFilter = new HashMap(); + } + this.filter = castFilter; + } + + public String toString() { + return this.filter.toString(); + } + + public Set keySet() { + return filter.keySet(); + } + + @SuppressWarnings({ "unchecked", "serial" }) + private static ArrayList toStringArrayList(final Object toCast) { + // Return object as an ArrayList wherever it's defined as + // list or not + // + // ### Example + // object: + // - firstValue + // - secondValue + // ### OR + // object: singleValue + // ### + if (toCast instanceof String) { + ArrayList toCastList = new ArrayList<>(); + toCastList.add(toCast.toString()); + return toCastList; + } + return (ArrayList) toCast; + } + + + public ArrayList getBeanNames() { + 
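JMXFilter is a thin wrapper over the map Jackson deserializes from one `include:` or `exclude:` block. A sketch of building one by hand, using keys from the filter template earlier in the patch:

```java
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import org.gnuhpc.bigdata.model.JMXFilter;

// Hand-built equivalent of one YAML include block; JMXFilter simply
// stores the map and interprets well-known keys on demand.
public class JMXFilterDemo {
  public static void main(String[] args) {
    HashMap<String, Object> raw = new HashMap<>();
    raw.put("domain", "kafka.server");
    raw.put("bean", "kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec");
    raw.put("attribute", new ArrayList<>(Arrays.asList("Count", "OneMinuteRate")));

    JMXFilter include = new JMXFilter(raw);
    System.out.println(include.getDomain());    // kafka.server
    System.out.println(include.getBeanNames()); // [kafka.server:type=...,name=...]
  }
}
```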
if (isEmptyBeanName()){ + return new ArrayList(); + } + final Object beanNames = (filter.get("bean") != null) ? filter.get("bean") : filter.get("bean_name"); + // Return bean names as an ArrayList wherever it's defined as + // list or not + // + // ### Example + // bean: + // - org.apache.cassandra.db:type=Caches,keyspace=system,cache=HintsColumnFamilyKeyCache + // - org.datadog.jmxfetch.test:type=type=SimpleTestJavaApp + // ### OR + // bean: org.datadog.jmxfetch.test:type=type=SimpleTestJavaApp + // ### + return toStringArrayList(beanNames); + } + + private static ArrayList toPatternArrayList(final Object toCast) { + ArrayList patternArrayList = new ArrayList(); + ArrayList stringArrayList = toStringArrayList(toCast); + for (String string : stringArrayList) { + patternArrayList.add(Pattern.compile(string)); + } + + return patternArrayList; + } + + public ArrayList getBeanRegexes() { + // Return bean regexes as an ArrayList of Pattern whether it's defined as + // a list or not + + if (this.beanRegexes == null) { + if (filter.get("bean_regex") == null){ + this.beanRegexes = new ArrayList(); + } else { + final Object beanRegexNames = filter.get("bean_regex"); + this.beanRegexes = toPatternArrayList(beanRegexNames); + } + } + + return this.beanRegexes; + } + + /* + public ArrayList getExcludeTags() { + // Return excluded tags as an ArrayList whether it's defined as a list or not + + if (this.excludeTags == null) { + if (filter.get("exclude_tags") == null){ + this.excludeTags = new ArrayList(); + } else { + final Object exclude_tags = filter.get("exclude_tags"); + this.excludeTags = toStringArrayList(exclude_tags); + } + } + + return this.excludeTags; + } + + public HashMap getAdditionalTags() { + // Return additional tags + if (this.additionalTags == null) { + if (filter.get("tags") == null){ + this.additionalTags = new HashMap(); + } else { + this.additionalTags = (HashMap)filter.get("tags"); + } + } + + return this.additionalTags; + } + */ + + public String getDomain() { + return (String) filter.get("domain"); + } + + public Pattern getDomainRegex() { + if (this.filter.get("domain_regex") == null) { + return null; + } + + if (this.domainRegex == null) { + this.domainRegex = Pattern.compile((String) this.filter.get("domain_regex")); + } + + return this.domainRegex; + } + + public Object getAttribute() { + return filter.get("attribute"); + } + + public ArrayList getParameterValues(String parameterName) { + // Return bean attributes values as an ArrayList wherever it's defined as + // list or not + // + // ### Example + // bean_parameter: + // - exampleType1 + // - exampleType2 + // ### OR + // bean_parameter: onlyOneType + // ### + final Object beanValues = filter.get(parameterName); + return toStringArrayList(beanValues); + } + + public boolean isEmptyBeanName() { + return (filter.get("bean") == null && filter.get("bean_name") == null); + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXMetricData.java b/src/main/java/org/gnuhpc/bigdata/model/JMXMetricData.java new file mode 100644 index 0000000..5ee3876 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXMetricData.java @@ -0,0 +1,26 @@ +package org.gnuhpc.bigdata.model; + +import com.fasterxml.jackson.annotation.JsonFormat; +import lombok.Getter; +import lombok.Setter; + +import java.time.LocalDateTime; +import java.util.HashMap; +import java.util.LinkedList; + +@Getter +@Setter +public class JMXMetricData { + private String host; + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") + private 
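The @JsonFormat pattern here is what turns the LocalDateTime into a flat string in responses; serialization relies on Jackson's JSR-310 module being registered. A quick check (assumes jackson-datatype-jsr310 on the classpath):

```java
import com.fasterxml.jackson.annotation.JsonFormat;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import java.time.LocalDateTime;

public class TimestampFormatDemo {
  public static class Stamped {
    @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss")
    public LocalDateTime timestamp = LocalDateTime.of(2018, 1, 2, 3, 4, 5);
  }

  public static void main(String[] args) throws Exception {
    ObjectMapper mapper = new ObjectMapper().registerModule(new JavaTimeModule());
    System.out.println(mapper.writeValueAsString(new Stamped()));
    // {"timestamp":"2018-01-02 03:04:05"}
  }
}
```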
LocalDateTime timestamp; + private Boolean collected; + private LinkedList> metrics; + private String msg; + + public JMXMetricData(String host, LinkedList> metrics) { + this.host = host; + this.timestamp = LocalDateTime.now(); + this.metrics = metrics; + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXMetricDataV1.java b/src/main/java/org/gnuhpc/bigdata/model/JMXMetricDataV1.java new file mode 100644 index 0000000..8ee573e --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXMetricDataV1.java @@ -0,0 +1,25 @@ +package org.gnuhpc.bigdata.model; + +import java.time.LocalDateTime; +import java.util.Map; + +import com.fasterxml.jackson.annotation.JsonFormat; +import lombok.Getter; +import lombok.Setter; + +@Getter +@Setter +public class JMXMetricDataV1 { + private String host; + @JsonFormat(shape = JsonFormat.Shape.STRING, pattern = "yyyy-MM-dd HH:mm:ss") + private LocalDateTime timestamp; + private Boolean collected; + private Map mbeanInfo; + private String msg; + + public JMXMetricDataV1(String host, Map mbeanInfo) { + this.host = host; + this.timestamp = LocalDateTime.now(); + this.mbeanInfo = mbeanInfo; + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXQuery.java b/src/main/java/org/gnuhpc/bigdata/model/JMXQuery.java new file mode 100644 index 0000000..bf03ad2 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXQuery.java @@ -0,0 +1,13 @@ +package org.gnuhpc.bigdata.model; + +import lombok.Getter; +import lombok.Setter; + +import java.util.LinkedList; + + +@Getter +@Setter +public class JMXQuery { + private LinkedList filters; +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXSimpleAttribute.java b/src/main/java/org/gnuhpc/bigdata/model/JMXSimpleAttribute.java new file mode 100644 index 0000000..0ddd4f4 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXSimpleAttribute.java @@ -0,0 +1,105 @@ +package org.gnuhpc.bigdata.model; + +import lombok.Getter; +import lombok.Setter; + +import javax.management.*; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.LinkedHashMap; +import java.util.LinkedList; + +@Getter +@Setter +public class JMXSimpleAttribute extends JMXAttribute { + private String metricType; + + public JMXSimpleAttribute(MBeanAttributeInfo attribute, ObjectName beanName, MBeanServerConnection connection) { + super(attribute, beanName, connection); + } + + @Override + public LinkedList> getMetrics() throws AttributeNotFoundException, + InstanceNotFoundException, MBeanException, ReflectionException, IOException { + HashMap metric = new HashMap(); + + metric.put("domain", getBeanName().getDomain()); + metric.put("beanName", getBeanName().toString()); + metric.put("attributeName", getAttributeName()); + metric.put("alias", getAlias()); + metric.put("value", castToDouble(getValue(), null)); + //metric.put("tags", getTags()); + metric.put("metric_type", getMetricType()); + LinkedList> metrics = new LinkedList>(); + metrics.add(metric); + return metrics; + } + + public boolean match(JMXConfiguration configuration) { + return matchDomain(configuration) + && matchBean(configuration) + && matchAttribute(configuration) + && !( + excludeMatchDomain(configuration) + || excludeMatchBean(configuration) + || excludeMatchAttribute(configuration)); + + } + + private boolean matchAttribute(JMXConfiguration configuration) { + JMXFilter include = configuration.getInclude(); + if (include.getAttribute() == null) { + return true; + } else if ((include.getAttribute() instanceof 
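Each simple attribute ultimately becomes one flat map in the collector output. Roughly, for the BytesInPerSec rate from the filter template (the value and alias here are illustrative):

```java
import java.util.HashMap;

// Illustrative shape of one entry emitted by JMXSimpleAttribute.getMetrics().
public class MetricShapeDemo {
  public static void main(String[] args) {
    HashMap<String, Object> metric = new HashMap<>();
    metric.put("domain", "kafka.server");
    metric.put("beanName", "kafka.server:type=BrokerTopicMetrics,name=BytesInPerSec");
    metric.put("attributeName", "OneMinuteRate");
    metric.put("alias", "BytesInPerSec");
    metric.put("metric_type", "KAFKA_BROKER_PERF");
    metric.put("value", 1024.5); // result of castToDouble(...)
    System.out.println(metric);
  }
}
```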
LinkedHashMap) + && ((LinkedHashMap) (include.getAttribute())).containsKey(getAttributeName())) { + return true; + + } else if ((include.getAttribute() instanceof ArrayList + && ((ArrayList) (include.getAttribute())).contains(getAttributeName()))) { + return true; + } + + return false; + } + + private boolean excludeMatchAttribute(JMXConfiguration configuration) { + JMXFilter exclude = configuration.getExclude(); + if (exclude == null) return false; + if (exclude.getAttribute() == null) { + return false; + } else if ((exclude.getAttribute() instanceof LinkedHashMap) + && ((LinkedHashMap) (exclude.getAttribute())).containsKey(getAttributeName())) { + return true; + + } else if ((exclude.getAttribute() instanceof ArrayList + && ((ArrayList) (exclude.getAttribute())).contains(getAttributeName()))) { + return true; + } + return false; + } + + private Object getValue() throws AttributeNotFoundException, InstanceNotFoundException, MBeanException, + ReflectionException, IOException, NumberFormatException { + return this.getJmxValue(); + } + + private String getMetricType() { + JMXFilter include = getMatchingConf().getInclude(); + if (metricType != null) { + return metricType; + } else if (include.getAttribute() instanceof LinkedHashMap) { + LinkedHashMap> attribute = (LinkedHashMap>) (include.getAttribute()); + metricType = attribute.get(getAttributeName()).get(METRIC_TYPE); + if (metricType == null) { + metricType = attribute.get(getAttributeName()).get("type"); + } + } + + if (metricType == null) { // Default to gauge + metricType = "gauge"; + } + + return metricType; + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/JMXTabularAttribute.java b/src/main/java/org/gnuhpc/bigdata/model/JMXTabularAttribute.java new file mode 100644 index 0000000..916adc3 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/JMXTabularAttribute.java @@ -0,0 +1,260 @@ +package org.gnuhpc.bigdata.model; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j; + +import javax.management.*; +import javax.management.openmbean.CompositeData; +import javax.management.openmbean.InvalidKeyException; +import javax.management.openmbean.TabularData; +import java.io.IOException; +import java.util.*; + +@Getter +@Setter +@Log4j +public class JMXTabularAttribute extends JMXAttribute { + + private HashMap>> subAttributeList; + + public JMXTabularAttribute(MBeanAttributeInfo attribute, ObjectName beanName, MBeanServerConnection connection) { + super(attribute, beanName, connection); + subAttributeList = new HashMap>>(); + } + + @Override + public LinkedList> getMetrics() throws AttributeNotFoundException, + InstanceNotFoundException, MBeanException, ReflectionException, IOException { + LinkedList> metrics = new LinkedList>(); + HashMap>> subMetrics = new HashMap>>(); + + for (String dataKey : subAttributeList.keySet()) { + HashMap> subSub = subAttributeList.get(dataKey); + for (String metricKey : subSub.keySet()) { + String fullMetricKey = getAttributeName() + "." 
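Tabular attributes are keyed twice: once by the row's index values (see getMultiKey further down) and once per column. Schematically, with a hypothetical attribute and row:

```java
import java.util.Arrays;
import java.util.Collection;

// How a TabularData row key plus a column name become the pathKey and
// fullMetricKey used in JMXTabularAttribute.getMetrics().
public class TabularKeyDemo {
  static String multiKey(Collection<?> keys) {
    StringBuilder sb = new StringBuilder();
    boolean first = true;
    for (Object key : keys) {
      if (!first) sb.append(",");
      sb.append(key);
      first = false;
    }
    return sb.toString();
  }

  public static void main(String[] args) {
    // Hypothetical row indexed by (topic, partition), column "Count",
    // on an attribute named "PerPartition":
    String pathKey = multiKey(Arrays.asList("my-topic", "0"));
    String fullMetricKey = "PerPartition" + "." + "Count";
    System.out.println(pathKey + " -> " + fullMetricKey); // my-topic,0 -> PerPartition.Count
  }
}
```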
+ metricKey; + + HashMap metric = subSub.get(metricKey); + metric.put("domain", getBeanName().getDomain()); + metric.put("beanName", getBeanName().toString()); + metric.put("attributeName", fullMetricKey); + if (metric.get(ALIAS) == null) { + metric.put(ALIAS, convertMetricName(getAlias(metricKey))); + } + + if (metric.get(METRIC_TYPE) == null) { + metric.put(METRIC_TYPE, getMetricType(metricKey)); + } + + /* + if (metric.get("tags") == null) { + metric.put("tags", getTags(dataKey, metricKey)); + }*/ + + metric.put("value", castToDouble(getValue(dataKey, metricKey), null)); + + if(!subMetrics.containsKey(fullMetricKey)) { + subMetrics.put(fullMetricKey, new LinkedList>()); + } + subMetrics.get(fullMetricKey).add(metric); + } + } + + for (String key : subMetrics.keySet()) { + // only add explicitly included metrics + if (getAttributesFor(key) != null) { + metrics.addAll(sortAndFilter(key, subMetrics.get(key))); + } + } + + return metrics; + } + + private Object getMetricType(String subAttribute) { + String subAttributeName = getAttribute().getName() + "." + subAttribute; + String metricType = null; + + JMXFilter include = getMatchingConf().getInclude(); + if (include.getAttribute() instanceof LinkedHashMap) { + LinkedHashMap> attribute = (LinkedHashMap>) (include.getAttribute()); + metricType = attribute.get(subAttributeName).get(METRIC_TYPE); + if (metricType == null) { + metricType = attribute.get(subAttributeName).get("type"); + } + } + + if (metricType == null) { + metricType = "gauge"; + } + + return metricType; + } + + private Object getValue(String key, String subAttribute) throws AttributeNotFoundException, + InstanceNotFoundException, + MBeanException, ReflectionException, IOException { + + try{ + Object value = this.getJmxValue(); + String attributeType = getAttribute().getType(); + + TabularData data = (TabularData) value; + for (Object rowKey : data.keySet()) { + Collection keys = (Collection) rowKey; + String pathKey = getMultiKey(keys); + if (key.equals(pathKey)) { + CompositeData compositeData = data.get(keys.toArray()); + if (subAttribute.contains(".")) { + // walk down the path + Object o; + for (String subPathKey : subAttribute.split("\\.")) { + o = compositeData.get(subPathKey); + if (o instanceof CompositeData) { + compositeData = (CompositeData) o; + } else { + return compositeData.get(subPathKey); + } + } + } else { + return compositeData.get(subAttribute); + } + } + } + } + catch (InvalidKeyException e) { + log.warn("`"+getAttribute().getName()+"` attribute does not have a `"+subAttribute+"` key."); + return null; + } + + throw new NumberFormatException(); + } + + private Map getAttributesFor(String key) { + JMXFilter include = getMatchingConf().getInclude(); + if (include != null) { + Object includeAttribute = include.getAttribute(); + if (includeAttribute instanceof LinkedHashMap) { + return (Map) ((Map)includeAttribute).get(key); + } + } + return null; + } + + private List> sortAndFilter(String metricKey, LinkedList> + metrics) { + Map attributes = getAttributesFor(metricKey); + if (!attributes.containsKey("limit")) { + return metrics; + } + Integer limit = (Integer) attributes.get("limit"); + if (metrics.size() <= limit) { + return metrics; + } + MetricComparator comp = new MetricComparator(); + Collections.sort(metrics, comp); + String sort = (String) attributes.get("sort"); + if (sort == null || sort.equals("desc")) { + metrics.subList(0, limit).clear(); + } else { + metrics.subList(metrics.size() - limit, metrics.size()).clear(); + } + return metrics; + } + + 
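sortAndFilter above honors optional `limit` and `sort` keys on the attribute's filter config. Its trimming in isolation; note that the `desc` branch clears subList(0, limit), which leaves size minus limit rows rather than capping the result at `limit`, so the intended semantics are worth double-checking:

```java
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;

// Standalone run of the trimming logic: ascending sort by "value",
// then subList(0, limit).clear() when sort is absent or "desc".
public class LimitSortDemo {
  public static void main(String[] args) {
    LinkedList<HashMap<String, Object>> metrics = new LinkedList<>();
    for (double v : new double[] {5.0, 1.0, 3.0}) {
      HashMap<String, Object> m = new HashMap<>();
      m.put("value", v);
      metrics.add(m);
    }
    int limit = 2;
    metrics.sort(Comparator.comparingDouble(m -> (Double) m.get("value")));
    metrics.subList(0, limit).clear(); // "desc" branch: drops the 2 smallest
    System.out.println(metrics);       // [{value=5.0}]
  }
}
```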
private class MetricComparator implements Comparator> { + public int compare(HashMap o1, HashMap o2) { + Double v1 = (Double) o1.get("value"); + Double v2 = (Double) o2.get("value"); + return v1.compareTo(v2); + } + } + + @Override + public boolean match(JMXConfiguration configuration) { + if (!matchDomain(configuration) + || !matchBean(configuration) + || excludeMatchDomain(configuration) + || excludeMatchBean(configuration)) { + return false; + } + + try { + populateSubAttributeList(getJmxValue()); + } catch (Exception e) { + return false; + } + + return matchAttribute(configuration);//TODO && !excludeMatchAttribute(configuration); + } + + private void populateSubAttributeList(Object value) { + TabularData data = (TabularData) value; + for (Object rowKey : data.keySet()) { + Collection keys = (Collection) rowKey; + CompositeData compositeData = data.get(keys.toArray()); + String pathKey = getMultiKey(keys); + HashMap> subAttributes = new HashMap>(); + for (String key : compositeData.getCompositeType().keySet()) { + if (compositeData.get(key) instanceof CompositeData) { + for (String subKey : ((CompositeData) compositeData.get(key)).getCompositeType().keySet()) { + subAttributes.put(key + "." + subKey, new HashMap()); + } + } else { + subAttributes.put(key, new HashMap()); + } + } + subAttributeList.put(pathKey, subAttributes); + } + } + + private boolean matchAttribute(JMXConfiguration configuration) { + if (matchSubAttribute(configuration.getInclude(), getAttributeName(), true)) { + return true; + } + + Iterator it1 = subAttributeList.keySet().iterator(); + while (it1.hasNext()) { + String key = it1.next(); + HashMap> subSub = subAttributeList.get(key); + Iterator it2 = subSub.keySet().iterator(); + while (it2.hasNext()) { + String subKey = it2.next(); + if (!matchSubAttribute(configuration.getInclude(), getAttributeName() + "." 
+ subKey, true)) { + it2.remove(); + } + } + if (subSub.size() <= 0) { + it1.remove(); + } + } + + return subAttributeList.size() > 0; + } + + private String getMultiKey(Collection keys) { + StringBuilder sb = new StringBuilder(); + boolean first = true; + for (Object key : keys) { + if (!first) { sb.append(","); } + // I hope these have sane toString() methods + sb.append(key.toString()); + first = false; + } + return sb.toString(); + } + + private boolean matchSubAttribute(JMXFilter params, String subAttributeName, boolean matchOnEmpty) { + if ((params.getAttribute() instanceof LinkedHashMap) + && ((LinkedHashMap) (params.getAttribute())).containsKey(subAttributeName)) { + return true; + } else if ((params.getAttribute() instanceof ArrayList + && ((ArrayList) (params.getAttribute())).contains(subAttributeName))) { + return true; + } else if (params.getAttribute() == null) { + return matchOnEmpty; + } + return false; + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/model/User.java b/src/main/java/org/gnuhpc/bigdata/model/User.java new file mode 100644 index 0000000..be06e96 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/model/User.java @@ -0,0 +1,26 @@ +package org.gnuhpc.bigdata.model; + +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.Setter; +import org.hibernate.validator.constraints.NotBlank; +import org.hibernate.validator.constraints.NotEmpty; + +import javax.validation.constraints.NotNull; + +@Getter +@Setter +@AllArgsConstructor +public class User { + @NotNull(message = "Username can not be null.") + @NotBlank(message = "Username can not be blank.") + private String username; + + @NotNull(message = "Password can not be null.") + @NotBlank(message = "Password can not be blank.") + private String password; + + @NotNull(message = "Role can not be null.") + @NotBlank(message = "Role can not be blank.") + private String role; +} diff --git a/src/main/java/org/gnuhpc/bigdata/security/BasicAuthenticationPoint.java b/src/main/java/org/gnuhpc/bigdata/security/BasicAuthenticationPoint.java new file mode 100644 index 0000000..3c2d795 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/security/BasicAuthenticationPoint.java @@ -0,0 +1,53 @@ +package org.gnuhpc.bigdata.security; + +import com.fasterxml.jackson.core.JsonGenerator; +import com.fasterxml.jackson.databind.JsonSerializer; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializerProvider; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; +import lombok.NoArgsConstructor; +import org.gnuhpc.bigdata.exception.RestErrorResponse; +import org.springframework.boot.jackson.JsonComponent; +import org.springframework.http.HttpStatus; +import org.springframework.security.core.AuthenticationException; +import org.springframework.security.web.authentication.www.BasicAuthenticationEntryPoint; +import org.springframework.stereotype.Component; + +import javax.servlet.ServletException; +import javax.servlet.http.HttpServletRequest; +import javax.servlet.http.HttpServletResponse; +import java.io.IOException; +import java.time.LocalDateTime; +import java.time.format.DateTimeFormatter; + +@Component +public class BasicAuthenticationPoint extends BasicAuthenticationEntryPoint { + @Override + public void commence(HttpServletRequest request, HttpServletResponse response, AuthenticationException authEx) + throws IOException, ServletException { + response.addHeader("WWW-Authenticate", "Basic realm=" +getRealmName()); + 
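For orientation, an entry point like this one is registered on the HTTP security chain; the commence(...) method here then renders the 401 body for unauthenticated requests. A hypothetical wiring sketch in the Spring Security 4.x style this project uses (the config class below is not part of this hunk):

```java
import org.gnuhpc.bigdata.security.BasicAuthenticationPoint;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;

// Hypothetical: shows where a BasicAuthenticationEntryPoint subclass plugs in.
@Configuration
@EnableWebSecurity
public class ExampleSecurityConfig extends WebSecurityConfigurerAdapter {

  @Autowired
  private BasicAuthenticationPoint basicAuthenticationPoint;

  @Override
  protected void configure(HttpSecurity http) throws Exception {
    http.csrf().disable();
    http.authorizeRequests().anyRequest().authenticated()
        .and().httpBasic().authenticationEntryPoint(basicAuthenticationPoint);
  }
}
```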
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); + String error = "Authentication Error: " + authEx.getClass().getCanonicalName(); + RestErrorResponse restAuthenticationError = new RestErrorResponse(HttpStatus.UNAUTHORIZED, error, authEx); + ObjectMapper mapper = new ObjectMapper(); + JavaTimeModule javaTimeModule = new JavaTimeModule(); + javaTimeModule.addSerializer(LocalDateTime.class, new LocalDateTimeSerializer()); + mapper.registerModule(javaTimeModule); + response.getWriter().print(mapper.writeValueAsString(restAuthenticationError)); + } + + @Override + public void afterPropertiesSet() throws Exception { + setRealmName("Contact Big Data Infrastructure Team to get available accounts."); + super.afterPropertiesSet(); + } + + @JsonComponent + @NoArgsConstructor + private class LocalDateTimeSerializer extends JsonSerializer<LocalDateTime> { + @Override + public void serialize(LocalDateTime value, JsonGenerator gen, SerializerProvider sp) throws IOException { + gen.writeString(value.format(DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss"))); + } + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/security/UserDetailsServiceImp.java b/src/main/java/org/gnuhpc/bigdata/security/UserDetailsServiceImp.java new file mode 100644 index 0000000..2bb09e7 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/security/UserDetailsServiceImp.java @@ -0,0 +1,89 @@ +package org.gnuhpc.bigdata.security; + +import com.google.common.util.concurrent.ThreadFactoryBuilder; +import lombok.extern.log4j.Log4j; +import org.gnuhpc.bigdata.config.WebSecurityConfig; +import org.gnuhpc.bigdata.model.User; +import org.gnuhpc.bigdata.utils.CommonUtils; +import org.springframework.security.core.userdetails.User.UserBuilder; +import org.springframework.security.core.userdetails.UserDetails; +import org.springframework.security.core.userdetails.UserDetailsService; +import org.springframework.security.core.userdetails.UsernameNotFoundException; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +@Log4j +public class UserDetailsServiceImp implements UserDetailsService { + private ScheduledExecutorService securityFileChecker; + private ArrayList<User> userList = new ArrayList<>(); + + public UserDetailsServiceImp(boolean checkSecurity, int checkInitDelay, int checkSecurityInterval) { + if (checkSecurity) { + securityFileChecker = Executors.newSingleThreadScheduledExecutor( + new ThreadFactoryBuilder().setNameFormat("securityFileChecker").build()); + securityFileChecker.scheduleWithFixedDelay(new SecurityFileCheckerRunnable(), + checkInitDelay, checkSecurityInterval, TimeUnit.SECONDS); + userList = fetchUserListFromSecurityFile(); + } + } + + @Override + public UserDetails loadUserByUsername(String username) throws UsernameNotFoundException { + User user = findUserByUsername(username); + + UserBuilder builder; + if (user != null) { + builder = org.springframework.security.core.userdetails.User.withUsername(username); + builder.password(user.getPassword()); + builder.roles(user.getRole()); + } else { + throw new UsernameNotFoundException("User not found."); + } + + return builder.build(); + } + + private User findUserByUsername(String username) { + for (User user : userList) { + if (username.equals(user.getUsername())) { + return user; + } + } + return null; + } + + private ArrayList<User> fetchUserListFromSecurityFile() {
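The parsing just below implies a security file keyed by username, each entry carrying a `password` and a `role`; the file lives at WebSecurityConfig.SECURITY_FILE_PATH. An inferred example (usernames and values hypothetical):

```yaml
# Inferred layout; top-level keys are usernames.
admin:
  password: admin123
  role: ADMIN
reader:
  password: reader123
  role: USER
```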
+        String securityFilePath = WebSecurityConfig.SECURITY_FILE_PATH;
+        try {
+            HashMap accounts = CommonUtils.yamlParse(securityFilePath);
+            userList.clear();
+            accounts.forEach((key, value) -> {
+                String username = (String) key;
+                Map<String, String> userInfo = (Map<String, String>) value;
+                userList.add(new User(username, userInfo.get("password"), userInfo.get("role")));
+            });
+        } catch (IOException ioException) {
+            log.error("Failed to process security file.", ioException);
+        }
+
+        return userList;
+    }
+
+    private class SecurityFileCheckerRunnable implements Runnable {
+        @Override
+        public void run() {
+            try {
+                userList = fetchUserListFromSecurityFile();
+            } catch (Throwable t) {
+                log.error("Uncaught exception in SecurityFileChecker thread", t);
+            }
+        }
+    }
+}
diff --git a/src/main/java/org/gnuhpc/bigdata/service/CollectorService.java b/src/main/java/org/gnuhpc/bigdata/service/CollectorService.java
new file mode 100644
index 0000000..e3bcade
--- /dev/null
+++ b/src/main/java/org/gnuhpc/bigdata/service/CollectorService.java
@@ -0,0 +1,248 @@
+package org.gnuhpc.bigdata.service;
+
+import lombok.extern.log4j.Log4j;
+import org.gnuhpc.bigdata.config.JMXConfig;
+import org.gnuhpc.bigdata.exception.CollectorException;
+import org.gnuhpc.bigdata.model.*;
+import org.gnuhpc.bigdata.utils.CommonUtils;
+import org.springframework.stereotype.Service;
+import org.springframework.validation.annotation.Validated;
+
+import javax.management.MBeanAttributeInfo;
+import javax.management.MBeanInfo;
+import javax.management.MBeanServerConnection;
+import javax.management.ObjectName;
+import java.io.File;
+import java.io.IOException;
+import java.util.*;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+@Service
+@Log4j
+@Validated
+public class CollectorService {
+    private static final List<String> SIMPLE_TYPES = Arrays.asList("long",
+            "java.lang.String", "int", "float", "double", "java.lang.Double", "java.lang.Float", "java.lang.Integer", "java.lang.Long",
+            "java.util.concurrent.atomic.AtomicInteger", "java.util.concurrent.atomic.AtomicLong",
+            "java.lang.Object", "java.lang.Boolean", "boolean", "java.lang.Number");
+    private static final List<String> COMPOSED_TYPES = Arrays.asList("javax.management.openmbean.CompositeData", "java.util.HashMap", "java.util.Map");
+    private static final List<String> MULTI_TYPES = Arrays.asList("javax.management.openmbean.TabularData");
+
+    public List collectJMXData(String jmxurl) {
+        LinkedList jmxMetricDataList = new LinkedList<>();
+        String[] hostList = jmxurl.split(",");
+        for (String host : hostList) {
+            JMXClient jmxClient = new JMXClient(host);
+            Map metricData = new HashMap<>();
+            JMXMetricDataV1 jmxMetricData = new JMXMetricDataV1(host, metricData);
+            try {
+                log.info("Start to collect JMXServiceURL:" + jmxClient.getJmxServiceURL());
+                jmxClient.connectWithTimeout();
+                MBeanServerConnection mBeanServerConnection = jmxClient.getJmxConnector().getMBeanServerConnection();
+                Set objectNames = mBeanServerConnection.queryNames(null, null);
+                for (ObjectName objectName : objectNames) {
+                    Map attributeInfoMap = getAttributeInfoByObjectName(mBeanServerConnection, objectName);
+                    metricData.put(objectName.toString(), attributeInfoMap);
+                }
+                jmxMetricData.setCollected(true);
+            } catch (Exception e) {
+                jmxMetricData.setCollected(false);
+                CollectorException ce = new CollectorException(String.format("%s occurred. URL: %s.
Reason: %s", + e.getClass().getCanonicalName(), jmxClient.getJmxServiceURL(), e.getCause()), e); + jmxMetricData.setMsg(ce.getLocalizedMessage()); + log.error("Failed to connect to " + jmxClient.getJmxServiceURL(), ce); + } finally { + jmxMetricDataList.add(jmxMetricData); + if (jmxClient.getJmxConnector() != null) { + try { + jmxClient.close(); + } catch (Throwable t) { + log.error("Connection close error occurred. ", t); + } + } + } + } + + return jmxMetricDataList; + } + + public List collectJMXData(String jmxurl, JMXQuery jmxQuery) { + List jmxMetricDataList = new ArrayList<>(); + LinkedList configurationList = jmxQuery.getFilters(); + LinkedList beanScopes = JMXConfiguration.getGreatestCommonScopes(configurationList); + Set beans = new HashSet<>(); + LinkedList matchingAttributes = new LinkedList<>(); + LinkedList> metrics = new LinkedList<>(); + + String[] hostList = jmxurl.split(","); + + for (String host : hostList) { + JMXClient jmxClient = new JMXClient(host); + beans.clear(); + matchingAttributes.clear(); + metrics.clear(); + JMXMetricData jmxMetricData = new JMXMetricData(host, metrics); + try { + jmxClient.connectWithTimeout(); + MBeanServerConnection mBeanServerConnection = jmxClient.getJmxConnector().getMBeanServerConnection(); + for (String scope : beanScopes) { + ObjectName name = new ObjectName(scope); + beans.addAll(mBeanServerConnection.queryNames(name, null)); + } + beans = (beans.isEmpty()) ? mBeanServerConnection.queryNames(null, null) : beans; + getMatchingAttributes(matchingAttributes, mBeanServerConnection, beans, configurationList); + jmxMetricData.setMetrics(getMetrics(matchingAttributes)); + jmxMetricData.setCollected(true); + } catch (Exception e) { + jmxMetricData.setCollected(false); + CollectorException ce = new CollectorException(String.format("%s occurred. URL: %s. Reason: %s", + e.getClass().getCanonicalName(), jmxClient.getJmxServiceURL(), e.getCause()), e); + jmxMetricData.setMsg(ce.getLocalizedMessage()); + log.error("Failed to connect to " + jmxClient.getJmxServiceURL(), ce); + } finally { + jmxMetricDataList.add(jmxMetricData); + try { + if (jmxClient.getJmxConnector() != null) { + jmxClient.close(); + } + } catch (Throwable t) { + log.error("Connection close error occurred. ", t); + } + } + } + return jmxMetricDataList; + } + + private void getMatchingAttributes(LinkedList matchingAttributes, MBeanServerConnection mBeanServerConnection, Set beans, + LinkedList configurationList) { + for (ObjectName beanName : beans) { + MBeanAttributeInfo[] attributeInfos; + try { + attributeInfos = mBeanServerConnection.getMBeanInfo(beanName).getAttributes(); + } catch (Exception e) { + CollectorException ce = new CollectorException(String.format("Get bean's attributes exception. BeanName: %s. Reason: %s", + beanName, e.getCause()), e); + log.error("Failed to get bean attributes. 
BeanName is " + beanName, ce);
+                continue;
+            }
+
+            for (MBeanAttributeInfo attributeInfo : attributeInfos) {
+                JMXAttribute jmxAttribute;
+                String attributeType = attributeInfo.getType();
+                if (SIMPLE_TYPES.contains(attributeType)) {
+                    log.debug(beanName + " : " + attributeInfo + " has a simple attribute type");
+                    jmxAttribute = new JMXSimpleAttribute(attributeInfo, beanName, mBeanServerConnection);
+                } else if (COMPOSED_TYPES.contains(attributeType)) {
+                    log.debug(beanName + " : " + attributeInfo + " has a composite attribute type");
+                    jmxAttribute = new JMXComplexAttribute(attributeInfo, beanName, mBeanServerConnection);
+                } else if (MULTI_TYPES.contains(attributeType)) {
+                    log.debug(beanName + " : " + attributeInfo + " has a tabular attribute type");
+                    jmxAttribute = new JMXTabularAttribute(attributeInfo, beanName, mBeanServerConnection);
+                } else {
+                    log.debug(beanName + " : " + attributeInfo + " has an unsupported type: " + attributeType);
+                    continue;
+                }
+                for (JMXConfiguration conf : configurationList) {
+                    if (jmxAttribute.match(conf)) {
+                        jmxAttribute.setMatchingConf(conf);
+                        matchingAttributes.add(jmxAttribute);
+                        log.debug("       Matching Attribute: " + jmxAttribute.getAttributeName()
+                                + ", BeanName:" + beanName.getCanonicalName());
+                    }
+                }
+            }
+        }
+    }
+
+    private Map getAttributeInfoByObjectName(MBeanServerConnection mBeanServerConnection,
+                                             ObjectName objectName) {
+        Map attributeInfoMap = new HashMap<>();
+        try {
+            MBeanInfo mbeanInfo = mBeanServerConnection.getMBeanInfo(objectName);
+            MBeanAttributeInfo[] mBeanAttributeInfoList = mbeanInfo.getAttributes();
+            log.debug("objectName:" + objectName.toString());
+            for (MBeanAttributeInfo info : mBeanAttributeInfoList) {
+                String attributeName = info.getName();
+                String attributeValue = "";
+                try {
+                    attributeValue = mBeanServerConnection.getAttribute(objectName, info.getName()).toString();
+                } catch (Exception e) {
+                    attributeValue = "Unavailable";
+                    log.info("Exception occurred when collecting ObjectName:" + objectName + ", AttributeName:" + attributeName, e);
+                }
+                attributeInfoMap.put(attributeName, attributeValue);
+            }
+        } catch (Exception e) {
+            attributeInfoMap.put("collected", "false");
+            log.info("Exception occurred when collecting ObjectName:" + objectName, e);
+        }
+        return attributeInfoMap;
+    }
+
+    public LinkedList<HashMap<String, Object>> getMetrics(LinkedList<JMXAttribute> matchingAttributes) throws IOException {
+        LinkedList<HashMap<String, Object>> metrics = new LinkedList<>();
+        Iterator<JMXAttribute> it = matchingAttributes.iterator();
+
+        while (it.hasNext()) {
+            JMXAttribute jmxAttr = it.next();
+            try {
+                LinkedList<HashMap<String, Object>> jmxAttrMetrics = jmxAttr.getMetrics();
+                for (HashMap<String, Object> m : jmxAttrMetrics) {
+                    metrics.add(m);
+                }
+            } catch (IOException e) {
+                throw e;
+            } catch (Exception e) {
+                log.debug("Cannot get metrics for attribute: " + jmxAttr, e);
+            }
+        }
+
+        return metrics;
+    }
+
+    public HashMap listJMXFilterTemplate(String filterKey) {
+        HashMap filterTemplateMap = new HashMap<>();
+        HashMap yamlHash;
+        String projectRootPath = "";
+        try {
+            File jmxFilterDir = new File(JMXConfig.JMX_FILTER_DIR);
+            if (!jmxFilterDir.exists() || !jmxFilterDir.isDirectory()) {
+                throw new IOException("JMXFilterTemplate directory does not exist.");
+            }
+            for (File yamlFile : jmxFilterDir.listFiles()) {
+                String fileFullName = yamlFile.getName();
+                log.info("Found JMXFilterTemplate filename=" + fileFullName);
+                if (matchIgnoreCase(filterKey, fileFullName)) {
+                    String[]
fileNames = fileFullName.split("\\."); + yamlHash = CommonUtils.yamlParse(yamlFile); + filterTemplateMap.put(fileNames[0], yamlHash); + } + } + } catch (IOException e) { + CollectorException ce = new CollectorException(String.format("%s occurred. Reason:%s. Advice:"+ + "Create a directory named JMXFilterTemplate to include filter templates in the project root path:%s.", + e.getClass().getCanonicalName(), e.getLocalizedMessage(), projectRootPath), e); + log.error("JMXFilterTemplate path does not exist."); + filterTemplateMap.put("error", ce.getLocalizedMessage()); + } + + return filterTemplateMap; + } + + boolean matchIgnoreCase(String regex, String string) { + Pattern pattern = Pattern.compile(regex, Pattern.CASE_INSENSITIVE); + Matcher matcher = pattern.matcher(string); + + boolean match = matcher.find(); + + return match; + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/service/KafkaAdminService.java b/src/main/java/org/gnuhpc/bigdata/service/KafkaAdminService.java index fc171a7..8fd363a 100644 --- a/src/main/java/org/gnuhpc/bigdata/service/KafkaAdminService.java +++ b/src/main/java/org/gnuhpc/bigdata/service/KafkaAdminService.java @@ -1,11 +1,38 @@ package org.gnuhpc.bigdata.service; +import static java.lang.String.format; +import static java.util.stream.Collectors.toList; +import static java.util.stream.Collectors.toSet; + import com.google.common.base.Strings; import com.google.common.collect.ImmutableMap; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonDeserializer; -import kafka.admin.*; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.function.Function; +import java.util.stream.Collectors; +import javax.annotation.PostConstruct; +import kafka.admin.AdminClient; +import kafka.admin.AdminUtils; +import kafka.admin.RackAwareMode; +import kafka.admin.ReassignPartitionsCommand; +import kafka.admin.ReassignmentStatus; +import kafka.admin.TopicCommand; import kafka.api.PartitionOffsetRequestInfo; import kafka.cluster.Broker; import kafka.common.OffsetAndMetadata; @@ -22,24 +49,34 @@ import lombok.extern.log4j.Log4j; import org.apache.commons.lang3.StringUtils; import org.apache.curator.framework.CuratorFramework; -import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; -import org.apache.kafka.common.KafkaException; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.clients.producer.RecordMetadata; import org.apache.kafka.common.Node; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.errors.ApiException; import org.apache.kafka.common.errors.InvalidTopicException; import org.apache.kafka.common.requests.MetadataResponse; -import org.apache.kafka.common.serialization.StringDeserializer; import org.gnuhpc.bigdata.CollectionConvertor; import 
org.gnuhpc.bigdata.componet.OffsetStorage; -import org.gnuhpc.bigdata.constant.ConsumerState; +import org.gnuhpc.bigdata.config.KafkaConfig; import org.gnuhpc.bigdata.constant.ConsumerType; import org.gnuhpc.bigdata.constant.GeneralResponseState; -import org.gnuhpc.bigdata.model.*; +import org.gnuhpc.bigdata.model.AddPartition; +import org.gnuhpc.bigdata.model.BrokerInfo; +import org.gnuhpc.bigdata.model.ConsumerGroupDesc; +import org.gnuhpc.bigdata.model.ConsumerGroupDescFactory; +import org.gnuhpc.bigdata.model.GeneralResponse; +import org.gnuhpc.bigdata.model.HealthCheckResult; +import org.gnuhpc.bigdata.model.ReassignWrapper; +import org.gnuhpc.bigdata.model.TopicBrief; +import org.gnuhpc.bigdata.model.TopicDetail; +import org.gnuhpc.bigdata.model.TopicMeta; +import org.gnuhpc.bigdata.model.TopicPartitionInfo; import org.gnuhpc.bigdata.task.FetchOffSetFromZKResult; import org.gnuhpc.bigdata.task.FetchOffsetFromZKTask; import org.gnuhpc.bigdata.utils.KafkaUtils; @@ -55,995 +92,1219 @@ import scala.collection.JavaConverters; import scala.collection.Seq; -import javax.annotation.PostConstruct; -import java.util.*; -import java.util.concurrent.*; -import java.util.function.Function; -import java.util.stream.Collectors; - -import static java.lang.String.format; -import static java.util.stream.Collectors.toList; -import static java.util.stream.Collectors.toSet; - -/** - * Created by gnuhpc on 2017/7/17. - */ - +/** Created by gnuhpc on 2017/7/17. */ @Service @Log4j @Validated public class KafkaAdminService { - private static final int channelSocketTimeoutMs = 600; - private static final int channelRetryBackoffMs = 600; - private static final String CONSUMERPATHPREFIX = "/consumers/"; - private static final String OFFSETSPATHPREFIX = "/offsets/"; - @Autowired - private ZookeeperUtils zookeeperUtils; + private static final int channelSocketTimeoutMs = 600; + private static final int channelRetryBackoffMs = 600; + private static final String CONSUMERPATHPREFIX = "/consumers/"; + private static final String OFFSETSPATHPREFIX = "/offsets/"; + @Autowired private ZookeeperUtils zookeeperUtils; - @Autowired - private KafkaUtils kafkaUtils; + @Autowired private KafkaUtils kafkaUtils; - @Autowired - private OffsetStorage storage; + @Autowired private KafkaConfig kafkaConfig; - //For AdminUtils use - private ZkUtils zkUtils; + @Autowired private OffsetStorage storage; - //For zookeeper connection - private CuratorFramework zkClient; + // For AdminUtils use + private ZkUtils zkUtils; - //For Json serialized - private Gson gson; + // For zookeeper connection + private CuratorFramework zkClient; - private scala.Option NONE = scala.Option.apply(null); + // For Json serialized + private Gson gson; - @PostConstruct - private void init() { - this.zkUtils = zookeeperUtils.getZkUtils(); - this.zkClient = zookeeperUtils.getCuratorClient(); - GsonBuilder builder = new GsonBuilder(); - builder.registerTypeAdapter(DateTime.class, (JsonDeserializer) (jsonElement, type, jsonDeserializationContext) -> new DateTime(jsonElement.getAsJsonPrimitive().getAsLong())); + private scala.Option NONE = scala.Option.apply(null); - this.gson = builder.create(); - } - - public TopicMeta createTopic(TopicDetail topic, String reassignStr) { - if (StringUtils.isEmpty(topic.getName())) { - throw new InvalidTopicException("Empty topic name"); - } - - if (Topic.hasCollisionChars(topic.getName())) { - throw new InvalidTopicException("Invalid topic name"); - } - - if (Strings.isNullOrEmpty(reassignStr) && topic.getPartitions() <= 0) { - 
throw new InvalidTopicException("Number of partitions must be larger than 0"); - } - Topic.validate(topic.getName()); - - - if (Strings.isNullOrEmpty(reassignStr)) { - AdminUtils.createTopic(zkUtils, - topic.getName(), topic.getPartitions(), topic.getFactor(), - topic.getProp(), RackAwareMode.Enforced$.MODULE$); - } else { - List argsList = new ArrayList<>(); - argsList.add("--topic"); - argsList.add(topic.getName()); - - if (topic.getProp().stringPropertyNames().size() != 0) { - argsList.add("--config"); - - for (String key : topic.getProp().stringPropertyNames()) { - argsList.add(key + "=" + topic.getProp().get(key)); - } - } - argsList.add("--replica-assignment"); - argsList.add(reassignStr); - - TopicCommand.createTopic(zkUtils, new TopicCommand.TopicCommandOptions(argsList.stream().toArray(String[]::new))); - } - - - try { - //Wait for a second for metadata propergating - Thread.sleep(3000); - } catch (InterruptedException e) { - e.printStackTrace(); - } - return describeTopic(topic.getName()); - } - - public List listTopics() { - return CollectionConvertor.seqConvertJavaList(zkUtils.getAllTopics()); - } - - public List listTopicBrief() { - KafkaConsumer consumer = kafkaUtils.createNewConsumer(); - Map> topicMap = consumer.listTopics(); - List result = topicMap.entrySet().parallelStream().map(e -> { - String topic = e.getKey(); - long replicateCount = e.getValue().parallelStream().flatMap(pi -> Arrays.stream(pi.replicas())).count(); - long isrCount = e.getValue().parallelStream().flatMap(pi -> Arrays.stream(pi.inSyncReplicas())).count(); - if (replicateCount == 0) { - return new TopicBrief(topic, e.getValue().size(), 0); - } else { - return new TopicBrief(topic, e.getValue().size(), ((double) isrCount / replicateCount)); - } - } - ).collect(toList()); - - consumer.close(); - - return result; - } + @PostConstruct + private void init() { + this.zkUtils = zookeeperUtils.getZkUtils(); + this.zkClient = zookeeperUtils.getCuratorClient(); + GsonBuilder builder = new GsonBuilder(); + builder.registerTypeAdapter( + DateTime.class, + (JsonDeserializer) + (jsonElement, type, jsonDeserializationContext) -> + new DateTime(jsonElement.getAsJsonPrimitive().getAsLong())); - public boolean existTopic(String topicName) { - return AdminUtils.topicExists(zkUtils, topicName); - } + this.gson = builder.create(); + } - public List listBrokers() { - List brokerList = CollectionConvertor.seqConvertJavaList(zkUtils.getAllBrokersInCluster()); - return brokerList.parallelStream().collect(Collectors.toMap(Broker::id, Broker::rack)).entrySet().parallelStream() - .map(entry -> { - String brokerInfoStr = null; - try { - brokerInfoStr = new String( - zkClient.getData().forPath(ZkUtils.BrokerIdsPath() + "/" + entry.getKey()) - ); - } catch (Exception e) { - e.printStackTrace(); - } - BrokerInfo brokerInfo = gson.fromJson(brokerInfoStr, BrokerInfo.class); - if (entry.getValue().isEmpty()) - brokerInfo.setRack(""); - else { - brokerInfo.setRack(entry.getValue().get()); - } - brokerInfo.setId(entry.getKey()); - return brokerInfo; - }).collect(toList()); + public TopicMeta createTopic(TopicDetail topic, String reassignStr) { + if (existTopic(topic.getName())) { + throw new InvalidTopicException("Topic:" + topic.getName() + " already exist."); } - - public TopicMeta describeTopic(@TopicExistConstraint String topicName) { - KafkaConsumer consumer = kafkaUtils.createNewConsumer(); - TopicMeta topicMeta = new TopicMeta(topicName); - List tmList = consumer.partitionsFor(topicName); - 
topicMeta.setPartitionCount(tmList.size()); - topicMeta.setReplicationFactor(tmList.get(0).replicas().length); - topicMeta.setTopicCustomConfigs(getTopicPropsFromZk(topicName)); - topicMeta.setTopicPartitionInfos(tmList.parallelStream().map( - tm -> { - TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo(); - topicPartitionInfo.setLeader(tm.leader().host()); - topicPartitionInfo.setIsr(Arrays.stream(tm.inSyncReplicas()).map(node -> node.host()).collect(toList())); - topicPartitionInfo.setPartitionId(tm.partition()); - topicPartitionInfo.setReplicas(Arrays.stream(tm.replicas()).map(node -> node.host()).collect(toList())); - topicPartitionInfo.setIn_sync(); - topicPartitionInfo.setStartOffset(getBeginningOffset(tm.leader(), tm.topic(), tm.partition())); - topicPartitionInfo.setEndOffset(getEndOffset(tm.leader(), tm.topic(), tm.partition())); - topicPartitionInfo.setMessageAvailable(); - return topicPartitionInfo; - - }).collect(toList()) - ); - - Collections.sort(topicMeta.getTopicPartitionInfos()); - - consumer.close(); - - return topicMeta; + if (StringUtils.isEmpty(topic.getName())) { + throw new InvalidTopicException("Empty topic name"); } - public GeneralResponse deleteTopic(@TopicExistConstraint String topic) { - log.warn("Delete topic " + topic); - AdminUtils.deleteTopic(zkUtils, topic); - - return new GeneralResponse(GeneralResponseState.success, topic + " has been deleted."); + if (Topic.hasCollisionChars(topic.getName())) { + throw new InvalidTopicException("Invalid topic name"); } - public Properties createTopicConf(@TopicExistConstraint String topic, Properties prop) { - Properties configs = getTopicPropsFromZk(topic); - configs.putAll(prop); - AdminUtils.changeTopicConfig(zkUtils, topic, configs); - log.info("Create config for topic: " + topic + "Configs:" + configs); - return getTopicPropsFromZk(topic); + if (Strings.isNullOrEmpty(reassignStr) && topic.getPartitions() <= 0) { + throw new InvalidTopicException("Number of partitions must be larger than 0"); } - - public Properties deleteTopicConf(@TopicExistConstraint String topic, List deleteProps) { - // compile the final set of configs - Properties configs = getTopicPropsFromZk(topic); - deleteProps.stream().forEach(config -> configs.remove(config)); - AdminUtils.changeTopicConfig(zkUtils, topic, configs); - log.info("Delete config for topic: " + topic); - return getTopicPropsFromZk(topic); + Topic.validate(topic.getName()); + + if (Strings.isNullOrEmpty(reassignStr)) { + try { + AdminUtils.createTopic( + zkUtils, + topic.getName(), + topic.getPartitions(), + topic.getFactor(), + topic.getProp(), + RackAwareMode.Enforced$.MODULE$); + } catch (Exception exception) { + throw new ApiException("Create topic exception." + exception); + } + } else { + List argsList = new ArrayList<>(); + argsList.add("--topic"); + argsList.add(topic.getName()); + + if (topic.getProp().stringPropertyNames().size() != 0) { + argsList.add("--config"); + + for (String key : topic.getProp().stringPropertyNames()) { + argsList.add(key + "=" + topic.getProp().get(key)); + } + } + argsList.add("--replica-assignment"); + argsList.add(reassignStr); + + try { + TopicCommand.createTopic( + zkUtils, + new TopicCommand.TopicCommandOptions(argsList.stream().toArray(String[]::new))); + } catch (Exception exception) { + throw new ApiException("Create topic exception." 
+ exception);
+      }
+    }
+
+    try {
+      // Wait a few seconds for topic metadata to propagate
+      Thread.sleep(3000);
+    } catch (InterruptedException e) {
+      e.printStackTrace();
+    }
+    return describeTopic(topic.getName());
+  }
+
+  public List listTopics() {
+    return CollectionConvertor.seqConvertJavaList(zkUtils.getAllTopics());
+  }
+
+  public List listTopicBrief() {
+    KafkaConsumer consumer = kafkaUtils.createNewConsumer();
+    Map<String, List<PartitionInfo>> topicMap = consumer.listTopics();
+    List result =
+        topicMap
+            .entrySet()
+            .parallelStream()
+            .map(
+                e -> {
+                  String topic = e.getKey();
+                  long replicateCount =
+                      e.getValue()
+                          .parallelStream()
+                          .flatMap(pi -> Arrays.stream(pi.replicas()))
+                          .count();
+                  long isrCount =
+                      e.getValue()
+                          .parallelStream()
+                          .flatMap(pi -> Arrays.stream(pi.inSyncReplicas()))
+                          .count();
+                  if (replicateCount == 0) {
+                    return new TopicBrief(topic, e.getValue().size(), 0);
+                  } else {
+                    return new TopicBrief(
+                        topic, e.getValue().size(), ((double) isrCount / replicateCount));
+                  }
+                })
+            .collect(toList());
+
+    consumer.close();
+
+    return result;
+  }
+
+  public boolean existTopic(String topicName) {
+    return AdminUtils.topicExists(zkUtils, topicName);
+  }
+
+  public List listBrokers() {
+    List brokerList =
+        CollectionConvertor.seqConvertJavaList(zkUtils.getAllBrokersInCluster());
+    return brokerList
+        .parallelStream()
+        .collect(Collectors.toMap(Broker::id, Broker::rack))
+        .entrySet()
+        .parallelStream()
+        .map(
+            entry -> {
+              String brokerInfoStr = null;
+              try {
+                brokerInfoStr =
+                    new String(
+                        zkClient.getData().forPath(ZkUtils.BrokerIdsPath() + "/" + entry.getKey()));
+              } catch (Exception e) {
+                e.printStackTrace();
+              }
+              BrokerInfo brokerInfo = gson.fromJson(brokerInfoStr, BrokerInfo.class);
+              if (entry.getValue().isEmpty()) {
+                brokerInfo.setRack("");
+              } else {
+                brokerInfo.setRack(entry.getValue().get());
+              }
+              brokerInfo.setId(entry.getKey());
+              return brokerInfo;
+            })
+        .collect(toList());
+  }
+
+  public TopicMeta describeTopic(@TopicExistConstraint String topicName) {
+    KafkaConsumer consumer = kafkaUtils.createNewConsumer();
+    TopicMeta topicMeta = new TopicMeta(topicName);
+    List tmList = consumer.partitionsFor(topicName);
+    topicMeta.setPartitionCount(tmList.size());
+    topicMeta.setReplicationFactor(tmList.get(0).replicas().length);
+    topicMeta.setTopicCustomConfigs(getTopicPropsFromZk(topicName));
+    topicMeta.setTopicPartitionInfos(
+        tmList
+            .parallelStream()
+            .map(
+                tm -> {
+                  TopicPartitionInfo topicPartitionInfo = new TopicPartitionInfo();
+                  topicPartitionInfo.setLeader(tm.leader().host());
+                  topicPartitionInfo.setIsr(
+                      Arrays.stream(tm.inSyncReplicas())
+                          .map(node -> node.host())
+                          .collect(toList()));
+                  topicPartitionInfo.setPartitionId(tm.partition());
+                  topicPartitionInfo.setReplicas(
+                      Arrays.stream(tm.replicas()).map(node -> node.host()).collect(toList()));
+                  topicPartitionInfo.setIn_sync();
+                  topicPartitionInfo.setStartOffset(
+                      getBeginningOffset(tm.leader(), tm.topic(), tm.partition()));
+                  topicPartitionInfo.setEndOffset(
+                      getEndOffset(tm.leader(), tm.topic(), tm.partition()));
+                  topicPartitionInfo.setMessageAvailable();
+                  return topicPartitionInfo;
+                })
+            .collect(toList()));
+
+    Collections.sort(topicMeta.getTopicPartitionInfos());
+
+    consumer.close();
+
+    return topicMeta;
+  }
+
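+  // Note: AdminUtils.deleteTopic only marks the topic for deletion in ZooKeeper; if the
+  // brokers run with delete.topic.enable=false, the topic is never physically removed.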
+ public GeneralResponse deleteTopic(@TopicExistConstraint String topic) { + log.warn("Delete topic " + topic); + AdminUtils.deleteTopic(zkUtils, topic); + + return new GeneralResponse(GeneralResponseState.success, topic + " has been deleted."); + } + + public Properties createTopicConf(@TopicExistConstraint String topic, Properties prop) { + Properties configs = getTopicPropsFromZk(topic); + configs.putAll(prop); + AdminUtils.changeTopicConfig(zkUtils, topic, configs); + log.info("Create config for topic: " + topic + "Configs:" + configs); + return getTopicPropsFromZk(topic); + } + + public Properties deleteTopicConf(@TopicExistConstraint String topic, List deleteProps) { + // compile the final set of configs + Properties configs = getTopicPropsFromZk(topic); + deleteProps.stream().forEach(config -> configs.remove(config)); + AdminUtils.changeTopicConfig(zkUtils, topic, configs); + log.info("Delete config for topic: " + topic); + return getTopicPropsFromZk(topic); + } + + public Properties updateTopicConf(@TopicExistConstraint String topic, Properties prop) { + AdminUtils.changeTopicConfig(zkUtils, topic, prop); + return getTopicPropsFromZk(topic); + } + + public Properties getTopicConf(@TopicExistConstraint String topic) { + return getTopicPropsFromZk(topic); + } + + public Properties getTopicConfByKey(@TopicExistConstraint String topic, String key) { + String value = + String.valueOf(AdminUtils.fetchEntityConfig(zkUtils, ConfigType.Topic(), topic).get(key)); + Properties returnProps = new Properties(); + if (!value.equals("null")) { + returnProps.setProperty(key, value); + return returnProps; + } else return null; + } + + public boolean deleteTopicConfByKey(@TopicExistConstraint String topic, String key) { + Properties configs = getTopicPropsFromZk(topic); + configs.remove(key); + AdminUtils.changeTopicConfig(zkUtils, topic, configs); + return getTopicPropsFromZk(topic).get(key) == null; + } + + public Properties updateTopicConfByKey( + @TopicExistConstraint String topic, String key, String value) { + Properties props = getTopicConf(topic); + props.setProperty(key, value); + String validValue = String.valueOf(updateTopicConf(topic, props).get(key)); + if (!validValue.equals("null") && validValue.equals(value)) { + return props; + } else { + throw new ApiException("Update Topic Config failed: " + key + ":" + value); } - - public Properties getTopicConfByKey(@TopicExistConstraint String topic, String key) { - String value = String.valueOf(AdminUtils.fetchEntityConfig(zkUtils, ConfigType.Topic(), topic).get(key)); - Properties returnProps = new Properties(); - if (!value.equals("null")) { - returnProps.setProperty(key, value); - return returnProps; - } else - return null; + } + + public Properties createTopicConfByKey( + @TopicExistConstraint String topic, String key, String value) { + Properties props = new Properties(); + props.setProperty(key, value); + String validValue = String.valueOf(createTopicConf(topic, props).get(key)); + if (!validValue.equals("null") && validValue.equals(value)) { + return props; + } else { + throw new ApiException("Update Topic Config failed: " + key + ":" + value); } - - public boolean deleteTopicConfByKey(@TopicExistConstraint String topic, String key) { - Properties configs = getTopicPropsFromZk(topic); - configs.remove(key); - AdminUtils.changeTopicConfig(zkUtils, topic, configs); - return getTopicPropsFromZk(topic).get(key) == null; + } + + public TopicMeta addPartition(@TopicExistConstraint String topic, AddPartition addPartition) { + List 
partitionMataData = + AdminUtils.fetchTopicMetadataFromZk(topic, zkUtils).partitionMetadata(); + int numPartitions = partitionMataData.size(); + int numReplica = partitionMataData.get(0).replicas().size(); + List brokerIdList = + listBrokers().stream().map(broker -> broker.getId()).collect(toList()); + List partitionIdList = + partitionMataData.stream().map(p -> String.valueOf(p.partition())).collect(toList()); + String assignmentStr = addPartition.getReplicaAssignment(); + String toBeSetReplicaAssignmentStr = ""; + + if (assignmentStr != null && !assignmentStr.equals("")) { + // Check out of index ids in replica assignment string + String[] ids = addPartition.getReplicaAssignment().split(",|:"); + if (Arrays.stream(ids).filter(id -> brokerIdList.contains(id)).count() != 0) { + throw new InvalidTopicException( + "Topic " + topic + ": manual reassignment str has wrong id!"); + } + + // Check if any ids duplicated in one partition in replica assignment + String[] assignPartitions = addPartition.getReplicaAssignment().split(","); + if (Arrays.stream(assignPartitions) + .filter( + p -> + Arrays.stream(p.split(":")).collect(Collectors.toSet()).size() + != p.split(":").length) + .count() + != 0) { + throw new InvalidTopicException( + "Topic " + topic + ": manual reassignment str has duplicated id in one partition!"); + } + + String replicaStr = Strings.repeat("0:", numReplica).replaceFirst(".$", ","); + toBeSetReplicaAssignmentStr = + Strings.repeat(replicaStr, numPartitions) + addPartition.getReplicaAssignment(); + } else { + toBeSetReplicaAssignmentStr = ""; } - public Properties updateTopicConfByKey(@TopicExistConstraint String topic, String key, String value) { - Properties props = getTopicConf(topic); - props.setProperty(key, value); - String validValue = String.valueOf(updateTopicConf(topic, props).get(key)); - if (!validValue.equals("null") && validValue.equals(value)) { - return props; - } else { - throw new ApiException("Update Topic Config failed: " + key + ":" + value); + AdminUtils.addPartitions( + zkUtils, + topic, + addPartition.getNumPartitionsAdded() + numPartitions, + toBeSetReplicaAssignmentStr, + true, + RackAwareMode.Enforced$.MODULE$); + + return describeTopic(topic); + } + + // Return + public List generateReassignPartition(ReassignWrapper reassignWrapper) { + Seq brokerSeq = + JavaConverters.asScalaBufferConverter(reassignWrapper.getBrokers()).asScala().toSeq(); + // + Tuple2 resultTuple2 = + ReassignPartitionsCommand.generateAssignment( + zkUtils, brokerSeq, reassignWrapper.generateReassignJsonString(), false); + List result = new ArrayList<>(); + result.add( + zkUtils.formatAsReassignmentJson( + (scala.collection.Map>) resultTuple2._2())); + result.add( + zkUtils.formatAsReassignmentJson( + (scala.collection.Map>) resultTuple2._1())); + + return result; + } + + public Map executeReassignPartition(String reassignStr) { + ReassignPartitionsCommand.executeAssignment(zkUtils, reassignStr); + return checkReassignStatus(reassignStr); + } + + public Map checkReassignStatus(String reassignStr) { + Map> partitionsToBeReassigned = + JavaConverters.mapAsJavaMapConverter(zkUtils.parsePartitionReassignmentData(reassignStr)) + .asJava(); + + Map> partitionsBeingReassigned = + JavaConverters.mapAsJavaMapConverter(zkUtils.getPartitionsBeingReassigned()) + .asJava() + .entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, data -> data.getValue().newReplicas())); + + java.util.Map reassignedPartitionsStatus = + partitionsToBeReassigned + .entrySet() + .stream() + .collect( 
+ Collectors.toMap( + Map.Entry::getKey, + pbr -> + ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded( + zkUtils, + pbr.getKey(), + pbr.getValue(), + JavaConverters.mapAsScalaMapConverter(partitionsToBeReassigned) + .asScala(), + JavaConverters.mapAsScalaMapConverter(partitionsBeingReassigned) + .asScala()))); + + return reassignedPartitionsStatus + .entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, r -> r.getValue().status())); + } + + private Set listAllOldConsumerGroups() { + log.info("Finish getting old consumers"); + return CollectionConvertor.seqConvertJavaList(zkUtils.getConsumerGroups()) + .stream() + .collect(toSet()); + } + + private Set listOldConsumerGroupsByTopic(@TopicExistConstraint String topic) + throws Exception { + + List consumersFromZk = zkClient.getChildren().forPath(ZkUtils.ConsumersPath()); + Set cList = new HashSet<>(); + + for (String consumer : consumersFromZk) { + String path = ZkUtils.ConsumersPath() + "/" + consumer + "/offsets"; + if (zkClient.checkExists().forPath(path) != null) { + if (zkClient.getChildren().forPath(path).size() != 0) { + if (!Strings.isNullOrEmpty(topic)) { + if (zkClient.getChildren().forPath(path).stream().filter(p -> p.equals(topic)).count() + != 0) cList.add(consumer); + } else { + cList.add(consumer); + } } + } } - public Properties createTopicConfByKey(@TopicExistConstraint String topic, String key, String value) { - Properties props = new Properties(); - props.setProperty(key, value); - String validValue = String.valueOf(createTopicConf(topic, props).get(key)); - if (!validValue.equals("null") && validValue.equals(value)) { - return props; - } else { - throw new ApiException("Update Topic Config failed: " + key + ":" + value); - } - + return cList; + + // May cause keeperexception, deprecated + // return + // JavaConverters.asJavaCollectionConverter(zkUtils.getAllConsumerGroupsForTopic(topic)).asJavaCollection().stream().collect(toList()); + } + + private Set listAllNewConsumerGroups() { + AdminClient adminClient = kafkaUtils.createAdminClient(); + log.info("Calling the listAllConsumerGroupsFlattened"); + Set activeGroups = + CollectionConvertor.seqConvertJavaList(adminClient.listAllConsumerGroupsFlattened()) + .stream() + .map(GroupOverview::groupId) + .collect(toSet()); + log.info("Checking the groups in storage"); + Set usedTobeGroups = + storage.getMap().entrySet().stream().map(Map.Entry::getKey).collect(toSet()); + activeGroups.addAll(usedTobeGroups); + log.info("Finish getting new consumers"); + adminClient.close(); + return activeGroups; + } + + private Set listNewConsumerGroupsByTopic(@TopicExistConstraint String topic) { + Set result = new HashSet(); + Set consumersList = listAllNewConsumerGroups(); + + for (String c : consumersList) { + AdminClient adminClient = kafkaUtils.createAdminClient(); + + List consumerSummaryList = + CollectionConvertor.listConvertJavaList(adminClient.describeConsumerGroup(c)); + Set topicSet = + consumerSummaryList + .stream() + .flatMap(cs -> CollectionConvertor.listConvertJavaList(cs.assignment()).stream()) + .map(TopicPartition::topic) + .filter(t -> t.equals(topic)) + .distinct() + .collect(toSet()); + + if (topicSet.size() != 0) { + result.add(c); + } + adminClient.close(); } + return result; + } - public TopicMeta addPartition(@TopicExistConstraint String topic, AddPartition addPartition) { - List partitionMataData = AdminUtils.fetchTopicMetadataFromZk(topic, zkUtils).partitionMetadata(); - int numPartitions = partitionMataData.size(); - int numReplica = 
partitionMataData.get(0).replicas().size(); - List brokerIdList = listBrokers().stream().map(broker -> broker.getId()).collect(toList()); - List partitionIdList = partitionMataData.stream().map(p -> String.valueOf(p.partition())).collect(toList()); - String assignmentStr = addPartition.getReplicaAssignment(); - String toBeSetReplicaAssignmentStr = ""; - - if (assignmentStr != null && !assignmentStr.equals("")) { - //Check out of index ids in replica assignment string - String[] ids = addPartition.getReplicaAssignment().split(",|:"); - if (Arrays.stream(ids).filter(id -> brokerIdList.contains(id)).count() != 0) { - throw new InvalidTopicException("Topic " + topic + ": manual reassignment str has wrong id!"); - } - - //Check if any ids duplicated in one partition in replica assignment - String[] assignPartitions = addPartition.getReplicaAssignment().split(","); - if (Arrays.stream(assignPartitions).filter(p -> - Arrays.stream(p.split(":")).collect(Collectors.toSet()).size() - != p.split(":").length).count() - != 0) { - throw new InvalidTopicException("Topic " + topic + ": manual reassignment str has duplicated id in one partition!"); - } - - String replicaStr = Strings.repeat("0:", numReplica).replaceFirst(".$", ","); - toBeSetReplicaAssignmentStr = Strings.repeat(replicaStr, numPartitions) + addPartition.getReplicaAssignment(); - } else { - toBeSetReplicaAssignmentStr = ""; - } - - AdminUtils.addPartitions(zkUtils, topic, addPartition.getNumPartitionsAdded() + numPartitions, - toBeSetReplicaAssignmentStr, true, - RackAwareMode.Enforced$.MODULE$); - - return describeTopic(topic); + public List describeOldCGByTopic( + String consumerGroup, @TopicExistConstraint String topic) { + if (!isOldConsumerGroup(consumerGroup)) { + throw new RuntimeException(consumerGroup + " non-exist"); } - - //Return - public List generateReassignPartition(ReassignWrapper reassignWrapper) { - Seq brokerSeq = JavaConverters.asScalaBufferConverter(reassignWrapper.getBrokers()).asScala().toSeq(); - // - Tuple2 resultTuple2 = ReassignPartitionsCommand.generateAssignment(zkUtils, brokerSeq, reassignWrapper.generateReassignJsonString(), false); - List result = new ArrayList<>(); - result.add(zkUtils.formatAsReassignmentJson((scala.collection.Map>) resultTuple2._2())); - result.add(zkUtils.formatAsReassignmentJson((scala.collection.Map>) resultTuple2._1())); - - return result; + List cgdList = new ArrayList<>(); + Map fetchOffSetFromZKResultList = new HashMap<>(); + + List topicList = + CollectionConvertor.seqConvertJavaList(zkUtils.getTopicsByConsumerGroup(consumerGroup)); + if (topicList.size() == 0) { + log.info("No topic for the consumer group, nothing return"); + return null; } - public Map executeReassignPartition(String reassignStr) { - ReassignPartitionsCommand.executeAssignment( - zkUtils, - reassignStr - ); - return checkReassignStatus(reassignStr); + List topicPartitions = getTopicPartitions(topic); + ZKGroupTopicDirs groupDirs = new ZKGroupTopicDirs(consumerGroup, topic); + Map ownerPartitionMap = + topicPartitions + .stream() + .collect( + Collectors.toMap( + TopicAndPartition::partition, + tp -> { + Option owner = + zkUtils.readDataMaybeNull( + groupDirs.consumerOwnerDir() + "/" + tp.partition()) + ._1; + if (owner != NONE) { + return owner.get(); + } else { + return "none"; + } + })); + + ExecutorService executor = Executors.newCachedThreadPool(); + + List taskList = + topicPartitions + .stream() + .map( + tp -> + new FetchOffsetFromZKTask( + zookeeperUtils, tp.topic(), consumerGroup, tp.partition())) + 
.collect(toList()); + List> resultList = null; + + try { + resultList = executor.invokeAll(taskList); + } catch (InterruptedException e) { + e.printStackTrace(); } - public Map checkReassignStatus(String reassignStr) { - Map> partitionsToBeReassigned = JavaConverters.mapAsJavaMapConverter( - zkUtils.parsePartitionReassignmentData(reassignStr)).asJava(); + executor.shutdown(); - Map> partitionsBeingReassigned = JavaConverters.mapAsJavaMapConverter( - zkUtils.getPartitionsBeingReassigned()).asJava().entrySet().stream().collect( - Collectors.toMap( - Map.Entry::getKey, - data -> data.getValue().newReplicas() - )); - - - java.util.Map reassignedPartitionsStatus = - partitionsToBeReassigned.entrySet().stream().collect(Collectors.toMap( - Map.Entry::getKey, - pbr -> ReassignPartitionsCommand.checkIfPartitionReassignmentSucceeded( - zkUtils, - pbr.getKey(), - pbr.getValue(), - JavaConverters.mapAsScalaMapConverter(partitionsToBeReassigned).asScala(), - JavaConverters.mapAsScalaMapConverter(partitionsBeingReassigned).asScala() - ) - )); - - - return reassignedPartitionsStatus.entrySet().stream().collect(Collectors.toMap( - Map.Entry::getKey, - r -> r.getValue().status() - )); + for (int i = 0; i < resultList.size(); i++) { + Future future = resultList.get(i); + try { + FetchOffSetFromZKResult offsetResult = future.get(); + fetchOffSetFromZKResultList.put(offsetResult.getParition(), offsetResult.getOffset()); + } catch (InterruptedException | ExecutionException e) { + e.printStackTrace(); + } } - private Set listAllOldConsumerGroups() { - log.info("Finish getting old consumers"); - return CollectionConvertor.seqConvertJavaList(zkUtils.getConsumerGroups()).stream().collect(toSet()); + log.info("Getting topic Metadata " + topic); + TopicMeta topicMeta = describeTopic(topic); + + cgdList.addAll( + setOldCGD(fetchOffSetFromZKResultList, ownerPartitionMap, topic, consumerGroup, topicMeta)); + Collections.sort(cgdList); + + return cgdList; + } + + private List setOldCGD( + Map fetchOffSetFromZKResultList, + Map ownerPartitionMap, + String topic, + String consumerGroup, + TopicMeta topicMeta) { + ConsumerGroupDescFactory factory = new ConsumerGroupDescFactory(kafkaUtils); + return ownerPartitionMap + .entrySet() + .stream() + .map( + op -> + factory.makeOldConsumerGroupDesc( + op, fetchOffSetFromZKResultList, topic, consumerGroup, topicMeta)) + .collect(toList()); + } + + public List describeNewCGByTopic(String consumerGroup, String topic) { + if (!isNewConsumerGroup(consumerGroup)) { + throw new RuntimeException(consumerGroup + " non-exist!"); } - private Set listOldConsumerGroupsByTopic(@TopicExistConstraint String topic) throws Exception { - - List consumersFromZk = zkClient.getChildren().forPath(ZkUtils.ConsumersPath()); - Set cList = new HashSet<>(); - - for (String consumer : consumersFromZk) { - String path = ZkUtils.ConsumersPath() + "/" + consumer + "/offsets"; - if (zkClient.checkExists().forPath(path) != null) { - if (zkClient.getChildren().forPath(path).size() != 0) { - if (!Strings.isNullOrEmpty(topic)) { - if (zkClient.getChildren().forPath(path).stream().filter(p -> p.equals(topic)).count() != 0) - cList.add(consumer); - } else { - cList.add(consumer); - } - } - } + return setNewCGD(consumerGroup, topic); + } + + private List setNewCGD(String consumerGroup, String topic) { + List cgdList = new ArrayList<>(); + AdminClient adminClient = kafkaUtils.createAdminClient(); + + List consumerSummaryList = + CollectionConvertor.listConvertJavaList(adminClient.describeConsumerGroup(consumerGroup)); + 
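+    // describeConsumerGroup returns one ConsumerSummary per live group member, so an empty
+    // result below means the group has no active consumers and is handled as "pending".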
// Nothing about this consumer group obtained, return an empty map directly + adminClient.close(); + + List filteredCSList = + consumerSummaryList + .parallelStream() + .filter( + cs -> + CollectionConvertor.listConvertJavaList(cs.assignment()) + .parallelStream() + .filter(tp -> tp.topic().equals(topic)) + .count() + != 0) + .collect(toList()); + + // Prepare the common metrics no matter the cg is active or not. + + // 1. Get the meta information of the topic + TopicMeta topicMeta = describeTopic(topic); + + // 2. Get the log end offset for every partition + Map partitionEndOffsetMap = + topicMeta + .getTopicPartitionInfos() + .stream() + .collect(Collectors.toMap(tpi -> tpi.getPartitionId(), tpi -> tpi.getEndOffset())); + if (filteredCSList.size() == 0) { // For Pending consumer group + + // Even from the offsetstorage, nothing about this consumer group obtained + // In this case, return an empty map directly. + Map storageMap = storage.get(consumerGroup); + if (storageMap == null) { + return null; + } + + // Get the current offset of each partition in this topic. + Map topicStorage = new HashMap<>(); + for (Map.Entry e : storageMap.entrySet()) { + if (e.getKey().topicPartition().topic().equals(topic)) { + topicStorage.put(e.getKey(), e.getValue()); } - - return cList; - - //May cause keeperexception, deprecated - //return JavaConverters.asJavaCollectionConverter(zkUtils.getAllConsumerGroupsForTopic(topic)).asJavaCollection().stream().collect(toList()); + } + + // Build consumer group description + ConsumerGroupDescFactory factory = new ConsumerGroupDescFactory(kafkaUtils); + cgdList.addAll( + topicStorage + .entrySet() + .stream() + .map( + storage -> + factory.makeNewPendingConsumerGroupDesc( + consumerGroup, partitionEndOffsetMap, storage, topic)) + .collect(toList())); + + } else { // For running consumer group + // Build consumer group description + ConsumerGroupDescFactory factory = new ConsumerGroupDescFactory(kafkaUtils); + for (AdminClient.ConsumerSummary cs : filteredCSList) { + List assignment = CollectionConvertor.listConvertJavaList(cs.assignment()); + // Second get the current offset of each partition in this topic + + cgdList.addAll( + assignment + .parallelStream() + .filter(tp -> tp.topic().equals(topic)) + .map( + tp -> + factory.makeNewRunningConsumerGroupDesc( + tp, consumerGroup, partitionEndOffsetMap, cs)) + .collect(toList())); + } } - private Set listAllNewConsumerGroups() { - AdminClient adminClient = kafkaUtils.createAdminClient(); - log.info("Calling the listAllConsumerGroupsFlattened"); - Set activeGroups = CollectionConvertor.seqConvertJavaList(adminClient.listAllConsumerGroupsFlattened()).stream() - .map(GroupOverview::groupId).collect(toSet()); - log.info("Checking the groups in storage"); - Set usedTobeGroups = storage.getMap().entrySet().stream().map(Map.Entry::getKey).collect(toSet()); - activeGroups.addAll(usedTobeGroups); - log.info("Finish getting new consumers"); - adminClient.close(); - return activeGroups; + return cgdList; + } + + public String getMessage( + @TopicExistConstraint String topic, + int partition, + long offset, + String decoder, + String avroSchema) { + KafkaConsumer consumer = + kafkaUtils.createNewConsumer(String.valueOf(System.currentTimeMillis())); + TopicPartition tp = new TopicPartition(topic, partition); + long beginningOffset = getBeginningOffset(topic, partition); + long endOffset = getEndOffset(topic, partition); + if (beginningOffset == endOffset) { + throw new ApiException("There is no message in this partition of this 
topic"); } - - private Set listNewConsumerGroupsByTopic(@TopicExistConstraint String topic) { - Set result = new HashSet(); - Set consumersList = listAllNewConsumerGroups(); - - for (String c : consumersList) { - AdminClient adminClient = kafkaUtils.createAdminClient(); - - List consumerSummaryList = CollectionConvertor.listConvertJavaList(adminClient.describeConsumerGroup(c)); - Set topicSet = consumerSummaryList.stream() - .flatMap(cs -> CollectionConvertor.listConvertJavaList(cs.assignment()).stream()) - .map(TopicPartition::topic).filter(t -> t.equals(topic)).distinct() - .collect(toSet()); - - if (topicSet.size() != 0) { - result.add(c); - } - adminClient.close(); - } - return result; + if (offset < beginningOffset || offset >= endOffset) { + log.error(offset + " error"); + consumer.close(); + throw new ApiException( + "offsets must be between " + String.valueOf(beginningOffset + " and " + (endOffset - 1))); } - - public List describeOldCGByTopic(String consumerGroup, @TopicExistConstraint String topic) { - if (!isOldConsumerGroup(consumerGroup)) { - throw new RuntimeException(consumerGroup + " non-exist"); - } - List cgdList = new ArrayList<>(); - Map fetchOffSetFromZKResultList = new HashMap<>(); - - List topicList = CollectionConvertor.seqConvertJavaList(zkUtils.getTopicsByConsumerGroup(consumerGroup)); - if (topicList.size() == 0) { - log.info("No topic for the consumer group, nothing return"); - return null; + consumer.assign(Collections.singletonList(tp)); + consumer.seek(tp, offset); + + String last = null; + + // ConsumerRecords crs = consumer.poll(channelRetryBackoffMs); + ConsumerRecords crs = consumer.poll(3000); + log.info( + "Seek to offset:" + + offset + + ", topic:" + + topic + + ", partition:" + + partition + + ", crs.count:" + + crs.count()); + if (crs.count() != 0) { + Iterator> it = crs.iterator(); + while (it.hasNext()) { + ConsumerRecord initCr = it.next(); + last = "Value: " + initCr.value() + ", Offset: " + String.valueOf(initCr.offset()); + log.info( + "Value: " + initCr.value() + ", initCr.Offset: " + String.valueOf(initCr.offset())); + if (last != null && initCr.offset() == offset) { + break; } - - List topicPartitions = getTopicPartitions(topic); - ZKGroupTopicDirs groupDirs = new ZKGroupTopicDirs(consumerGroup, topic); - Map ownerPartitionMap = topicPartitions.stream().collect(Collectors.toMap( - TopicAndPartition::partition, - tp -> { - Option owner = zkUtils.readDataMaybeNull(groupDirs.consumerOwnerDir() + "/" + tp.partition())._1; - if (owner != NONE) { - return owner.get(); - } else { - return "none"; - } - } - ) - ); - - ExecutorService executor = Executors.newCachedThreadPool(); - - List taskList = topicPartitions.stream().map( - tp -> new FetchOffsetFromZKTask(zookeeperUtils, tp.topic(), consumerGroup, tp.partition())) - .collect(toList()); - List> resultList = null; - - try { - resultList = executor.invokeAll(taskList); - } catch (InterruptedException e) { - e.printStackTrace(); - } - - executor.shutdown(); - - for (int i = 0; i < resultList.size(); i++) { - Future future = resultList.get(i); - try { - FetchOffSetFromZKResult offsetResult = future.get(); - fetchOffSetFromZKResultList.put( - offsetResult.getParition(), - offsetResult.getOffset()); - } catch (InterruptedException | ExecutionException e) { - e.printStackTrace(); - } - } - - - log.info("Getting topic Metadata " + topic); - TopicMeta topicMeta = describeTopic(topic); - - cgdList.addAll(setOldCGD(fetchOffSetFromZKResultList, ownerPartitionMap, topic, consumerGroup, topicMeta)); - 
Collections.sort(cgdList); - - return cgdList; + } } - - private List setOldCGD( - Map fetchOffSetFromZKResultList, - Map ownerPartitionMap, - String topic, String consumerGroup, TopicMeta topicMeta) { - ConsumerGroupDescFactory factory = new ConsumerGroupDescFactory(kafkaUtils); - return ownerPartitionMap.entrySet().stream().map(op -> - factory.makeOldConsumerGroupDesc( - op, fetchOffSetFromZKResultList, topic, consumerGroup, topicMeta - ) - ) - .collect(toList()); + log.info("last:" + last); + consumer.close(); + return last; + } + + public GeneralResponse resetOffset( + @TopicExistConstraint String topic, + int partition, + String consumerGroup, + ConsumerType type, + String offset) { + KafkaConsumer consumer = null; + log.info("To tell the consumergroup " + consumerGroup + " is new"); + if (type != null && type == ConsumerType.NEW) { + if (!isNewConsumerGroup(consumerGroup)) { + throw new ApiException("Consumer group " + consumerGroup + " is non-exist!"); + } } - public List describeNewCGByTopic(String consumerGroup, - String topic) { - if (!isNewConsumerGroup(consumerGroup)) { - throw new RuntimeException(consumerGroup + " non-exist!"); - } - - return setNewCGD(consumerGroup, topic); + log.info("To tell the consumergroup " + consumerGroup + " is old"); + if (type != null && type == ConsumerType.OLD) { + if (!isOldConsumerGroup(consumerGroup)) { + throw new ApiException("Consumer group " + consumerGroup + " is non-exist!"); + } } - private List setNewCGD(String consumerGroup, String topic) { - List cgdList = new ArrayList<>(); - AdminClient adminClient = kafkaUtils.createAdminClient(); - - List consumerSummaryList = - CollectionConvertor.listConvertJavaList(adminClient.describeConsumerGroup(consumerGroup)); - //Nothing about this consumer group obtained, return an empty map directly - adminClient.close(); - - List filteredCSList = consumerSummaryList.parallelStream() - .filter(cs -> - CollectionConvertor.listConvertJavaList(cs.assignment()).parallelStream() - .filter(tp -> tp.topic().equals(topic)).count() != 0) - .collect(toList()); - - //Prepare the common metrics no matter the cg is active or not. - - //1. Get the meta information of the topic - TopicMeta topicMeta = describeTopic(topic); - - //2. Get the log end offset for every partition - Map partitionEndOffsetMap = topicMeta.getTopicPartitionInfos().stream() - .collect(Collectors.toMap( - tpi -> tpi.getPartitionId(), - tpi -> tpi.getEndOffset() - ) - ); - if (filteredCSList.size() == 0) {//For Pending consumer group - - //Even from the offsetstorage, nothing about this consumer group obtained - // In this case, return an empty map directly. - Map storageMap = storage.get(consumerGroup); - if (storageMap == null) { - return null; - } - - //Get the current offset of each partition in this topic. 
- Map topicStorage = new HashMap<>(); - for (Map.Entry e : storageMap.entrySet()) { - if (e.getKey().topicPartition().topic().equals(topic)) { - topicStorage.put(e.getKey(), e.getValue()); - } - } - - //Build consumer group description - ConsumerGroupDescFactory factory = new ConsumerGroupDescFactory(kafkaUtils); - cgdList.addAll( - topicStorage.entrySet().stream().map( - storage -> factory.makeNewPendingConsumerGroupDesc( - consumerGroup, - partitionEndOffsetMap, - storage, - topic) - ).collect(toList())); - - } else { //For running consumer group - //Build consumer group description - ConsumerGroupDescFactory factory = new ConsumerGroupDescFactory(kafkaUtils); - for (AdminClient.ConsumerSummary cs : filteredCSList) { - List assignment = CollectionConvertor.listConvertJavaList(cs.assignment()); - //Second get the current offset of each partition in this topic - - cgdList.addAll(assignment.parallelStream() - .filter(tp->tp.topic().equals(topic)) - .map(tp -> factory.makeNewRunningConsumerGroupDesc(tp, consumerGroup, partitionEndOffsetMap, cs) - ).collect(toList())); - } - } + long offsetToBeReset; + long beginningOffset = getBeginningOffset(topic, partition); + long endOffset = getEndOffset(topic, partition); - return cgdList; + log.info("To tell the consumergroup " + consumerGroup + " is active now"); + if (isConsumerGroupActive(consumerGroup, type)) { + throw new ApiException( + "Assignments can only be reset if the group " + consumerGroup + " is inactive"); } - - public String getMessage(@TopicExistConstraint String topic, int partition, long offset, String decoder, String avroSchema) { - KafkaConsumer consumer = kafkaUtils.createNewConsumer(); + if (type != null && type == ConsumerType.NEW && isNewConsumerGroup(consumerGroup)) { + try { + log.info("The consumergroup " + consumerGroup + " is new. 
Reset offset now"); + consumer = kafkaUtils.createNewConsumer(consumerGroup); + // if type is new or the consumergroup itself is new TopicPartition tp = new TopicPartition(topic, partition); - long beginningOffset = getBeginningOffset(topic, partition); - long endOffset = getEndOffset(topic, partition); - if (beginningOffset == endOffset) { - throw new ApiException("There is no message in this partition of this topic"); - } - if (offset < beginningOffset || offset >= endOffset) { + consumer.assign(Arrays.asList(tp)); + consumer.poll(channelSocketTimeoutMs); + if (offset.equals("earliest")) { + consumer.seekToBeginning(Arrays.asList(tp)); + log.info("Reset to" + consumer.position(tp)); + } else if (offset.equals("latest")) { + consumer.seekToEnd(Arrays.asList(tp)); + log.info("Reset to" + consumer.position(tp)); + } else { + if (Long.parseLong(offset) < beginningOffset || Long.parseLong(offset) > endOffset) { log.error(offset + " error"); - consumer.close(); throw new ApiException( - "offsets must be between " + String.valueOf(beginningOffset - + " and " + (endOffset - 1) - ) - ); - } - consumer.assign(Collections.singletonList(tp)); - consumer.seek(tp, offset); - - String last = null; - - ConsumerRecords crs = consumer.poll(channelRetryBackoffMs); - if (crs.count() != 0) { - Iterator> it = crs.iterator(); - while (it.hasNext()) { - ConsumerRecord initCr = it.next(); - last = "Value: " + initCr.value() + ", Offset: " + String.valueOf(initCr.offset()); - if (last != null && initCr.offset() == offset) { - break; - } - } + "offsets must be between " + + String.valueOf(beginningOffset + " and " + (endOffset - 1))); + } + offsetToBeReset = Long.parseLong(offset); + consumer.seek(tp, offsetToBeReset); } - + consumer.commitSync(); + } catch (IllegalStateException e) { + storage.getMap().remove(consumerGroup); + throw new ApiException(e); + } finally { consumer.close(); - return last; + } } - public GeneralResponse resetOffset(@TopicExistConstraint String topic, int partition, - String consumerGroup, - ConsumerType type, String offset) { - KafkaConsumer consumer = null; - log.info("To tell the consumergroup " + consumerGroup + " is new"); - if (type != null && type == ConsumerType.NEW) { - if (!isNewConsumerGroup(consumerGroup)) { - throw new ApiException("Consumer group " + consumerGroup + " is non-exist!"); - } - } - - log.info("To tell the consumergroup " + consumerGroup + " is old"); - if (type != null && type == ConsumerType.OLD) { - if (!isOldConsumerGroup(consumerGroup)) { - throw new ApiException("Consumer group " + consumerGroup + " is non-exist!"); - } - } - - long offsetToBeReset; - long beginningOffset = getBeginningOffset(topic, partition); - long endOffset = getEndOffset(topic, partition); - - log.info("To tell the consumergroup " + consumerGroup + " is active now"); - if (isConsumerGroupActive(consumerGroup, type)) { - throw new ApiException("Assignments can only be reset if the group " + consumerGroup + " is inactive"); - } - - - if (type != null && type == ConsumerType.NEW && isNewConsumerGroup(consumerGroup)) { - try { - log.info("The consumergroup " + consumerGroup + " is new. 
Reset offset now"); - consumer = kafkaUtils.createNewConsumer(consumerGroup); - //if type is new or the consumergroup itself is new - TopicPartition tp = new TopicPartition(topic, partition); - consumer.assign(Arrays.asList(tp)); - consumer.poll(channelSocketTimeoutMs); - if (offset.equals("earliest")) { - consumer.seekToBeginning(Arrays.asList(tp)); - log.info("Reset to" + consumer.position(tp)); - } else if (offset.equals("latest")) { - consumer.seekToEnd(Arrays.asList(tp)); - log.info("Reset to" + consumer.position(tp)); - } else { - if (Long.parseLong(offset) < beginningOffset || Long.parseLong(offset) > endOffset) { - log.error(offset + " error"); - throw new ApiException( - "offsets must be between " + String.valueOf(beginningOffset - + " and " + (endOffset - 1) - ) - ); - } - offsetToBeReset = Long.parseLong(offset); - consumer.seek(tp, offsetToBeReset); - } - consumer.commitSync(); - } catch (IllegalStateException e) { - storage.getMap().remove(consumerGroup); - throw new ApiException(e); - } finally { - consumer.close(); - } - } - - - //if type is old or the consumer group itself is old - if (type != null && type == ConsumerType.OLD && isOldConsumerGroup(consumerGroup)) { - log.info("The consumergroup " + consumerGroup + " is old. Reset offset now"); - if (offset.equals("earliest")) { - offset = String.valueOf(beginningOffset); - } else if (offset.equals("latest")) { - offset = String.valueOf(endOffset); - } - try { - if (Long.parseLong(offset) < beginningOffset || Long.parseLong(offset) > endOffset) { - log.info("Setting offset to " + offset + " error"); - throw new ApiException( - "offsets must be between " + String.valueOf(beginningOffset - + " and " + (endOffset - 1) - ) - ); - } - log.info("Offset will be reset to " + offset); - zkUtils.zkClient().writeData( - "/consumers/" + consumerGroup + "/offsets/" + topic + "/" + partition, - offset); - } catch (Exception e) { - throw new ApiException(e); - } + // if type is old or the consumer group itself is old + if (type != null && type == ConsumerType.OLD && isOldConsumerGroup(consumerGroup)) { + log.info("The consumergroup " + consumerGroup + " is old. 
Reset offset now"); + if (offset.equals("earliest")) { + offset = String.valueOf(beginningOffset); + } else if (offset.equals("latest")) { + offset = String.valueOf(endOffset); + } + try { + if (Long.parseLong(offset) < beginningOffset || Long.parseLong(offset) > endOffset) { + log.info("Setting offset to " + offset + " error"); + throw new ApiException( + "offsets must be between " + + String.valueOf(beginningOffset + " and " + (endOffset - 1))); } - return new GeneralResponse(GeneralResponseState.success, "Reset the offset successfully!"); + log.info("Offset will be reset to " + offset); + zkUtils + .zkClient() + .writeData( + "/consumers/" + consumerGroup + "/offsets/" + topic + "/" + partition, offset); + } catch (Exception e) { + throw new ApiException(e); + } } - - public Map> getLastCommitTime(@ConsumerGroupExistConstraint String consumerGroup, - @TopicExistConstraint String topic, - ConsumerType type) { - Map> result = new ConcurrentHashMap<>(); - - if (type != null && type == ConsumerType.OLD) { - //Get Old Consumer commit time - try { - Map oldConsumerOffsetMap = new ConcurrentHashMap<>(); - if (zkClient.checkExists().forPath(CONSUMERPATHPREFIX + consumerGroup) != null - && zkClient.checkExists().forPath(CONSUMERPATHPREFIX + consumerGroup + OFFSETSPATHPREFIX + topic) != null) { - List offsets = zkClient.getChildren().forPath(CONSUMERPATHPREFIX + consumerGroup + OFFSETSPATHPREFIX + topic); - for (String offset : offsets) { - Integer id = Integer.valueOf(offset); - long mtime = zkClient.checkExists().forPath(CONSUMERPATHPREFIX + consumerGroup + OFFSETSPATHPREFIX + topic + "/" + offset).getMtime(); - oldConsumerOffsetMap.put(id, mtime); - } - - result.put("old", oldConsumerOffsetMap); - } - } catch (Exception e) { - e.printStackTrace(); - } - - } else { - //Get New consumer commit time, from offset storage instance - if (storage.get(consumerGroup) != null) { - Map storageResult = storage.get(consumerGroup); - result.put("new", (storageResult.entrySet().parallelStream().filter(s -> s.getKey().topicPartition().topic().equals(topic)) - .collect( - Collectors.toMap( - s -> s.getKey().topicPartition().partition(), - s -> { - if (s.getValue() != null) { - return s.getValue().commitTimestamp(); - } else { - return -1l; - } - } - ) - ) - ) - ); - } - + return new GeneralResponse(GeneralResponseState.success, "Reset the offset successfully!"); + } + + public Map> getLastCommitTime( + @ConsumerGroupExistConstraint String consumerGroup, + @TopicExistConstraint String topic, + ConsumerType type) { + Map> result = new ConcurrentHashMap<>(); + + if (type != null && type == ConsumerType.OLD) { + // Get Old Consumer commit time + try { + Map oldConsumerOffsetMap = new ConcurrentHashMap<>(); + if (zkClient.checkExists().forPath(CONSUMERPATHPREFIX + consumerGroup) != null + && zkClient + .checkExists() + .forPath(CONSUMERPATHPREFIX + consumerGroup + OFFSETSPATHPREFIX + topic) + != null) { + List offsets = + zkClient + .getChildren() + .forPath(CONSUMERPATHPREFIX + consumerGroup + OFFSETSPATHPREFIX + topic); + for (String offset : offsets) { + Integer id = Integer.valueOf(offset); + long mtime = + zkClient + .checkExists() + .forPath( + CONSUMERPATHPREFIX + + consumerGroup + + OFFSETSPATHPREFIX + + topic + + "/" + + offset) + .getMtime(); + oldConsumerOffsetMap.put(id, mtime); + } + + result.put("old", oldConsumerOffsetMap); } - - return result; + } catch (Exception e) { + e.printStackTrace(); + } + + } else { + // Get New consumer commit time, from offset storage instance + if 
(storage.get(consumerGroup) != null) { + Map storageResult = storage.get(consumerGroup); + result.put( + "new", + (storageResult + .entrySet() + .parallelStream() + .filter(s -> s.getKey().topicPartition().topic().equals(topic)) + .collect( + Collectors.toMap( + s -> s.getKey().topicPartition().partition(), + s -> { + if (s.getValue() != null) { + return s.getValue().commitTimestamp(); + } else { + return -1L; + } + })))); + } } - public GeneralResponse deleteConsumerGroup(String consumerGroup) { - if (!isOldConsumerGroup(consumerGroup)) { - throw new RuntimeException(consumerGroup + " non-exist"); - } - try { - if (zookeeperUtils.getCuratorClient().checkExists().forPath(CONSUMERPATHPREFIX + consumerGroup + "/ids") == null) { - zookeeperUtils.getCuratorClient().delete().deletingChildrenIfNeeded().forPath(CONSUMERPATHPREFIX + consumerGroup); - } else { - if (!AdminUtils.deleteConsumerGroupInZK(zkUtils, consumerGroup)) { - throw new ApiException(consumerGroup + " has not been deleted for some reason"); - } - } - } catch (Exception e) { - throw new RuntimeException(e); - } + return result; + } - return new GeneralResponse(GeneralResponseState.success, consumerGroup + " has been deleted."); + public GeneralResponse deleteConsumerGroup(String consumerGroup) { + if (!isOldConsumerGroup(consumerGroup)) { + throw new RuntimeException(consumerGroup + " does not exist"); } - - public Map> listAllConsumerGroups(ConsumerType type) { - Map> result = new HashMap<>(); - - if (type == null || type == ConsumerType.OLD) { - Set oldCGList = listAllOldConsumerGroups(); - if (oldCGList.size() != 0) { - result.put("old", oldCGList); - } + try { + if (zookeeperUtils + .getCuratorClient() + .checkExists() + .forPath(CONSUMERPATHPREFIX + consumerGroup + "/ids") + == null) { + zookeeperUtils + .getCuratorClient() + .delete() + .deletingChildrenIfNeeded() + .forPath(CONSUMERPATHPREFIX + consumerGroup); + } else { + if (!AdminUtils.deleteConsumerGroupInZK(zkUtils, consumerGroup)) { + throw new ApiException( + "The consumer " + consumerGroup + " is still active. Please stop it first."); } - - if (type == null || type == ConsumerType.NEW) { - Set newCGList = listAllNewConsumerGroups(); - if (newCGList.size() != 0) { - result.put("new", newCGList); - } - } - - - return result; + } + } catch (Exception e) { + throw new RuntimeException(e); } - public Map> listConsumerGroupsByTopic( - @TopicExistConstraint String topic, - ConsumerType type) { - Map> result = new HashMap<>(); - - if (type == null || type == ConsumerType.OLD) { - Set oldCGList = null; - try { - oldCGList = listOldConsumerGroupsByTopic(topic); - } catch (Exception e) { - e.printStackTrace(); - } - if (oldCGList.size() != 0) { - result.put("old", oldCGList); - } - } - - if (type == null || type == ConsumerType.NEW) { - Set newCGList = listNewConsumerGroupsByTopic(topic); + return new GeneralResponse(GeneralResponseState.success, consumerGroup + " has been deleted."); + } - if (newCGList.size() != 0) { - result.put("new", newCGList); - } - } + public Map> listAllConsumerGroups(ConsumerType type) { + Map> result = new HashMap<>(); - return result; + if (type == null || type == ConsumerType.OLD) { + Set oldCGList = listAllOldConsumerGroups(); + if (oldCGList.size() != 0) { + result.put("old", oldCGList); + } } - private List getTopicPartitions(String t) { - List tpList = new ArrayList<>(); - List l = Arrays.asList(t); - java.util.Map> tpMap = 
JavaConverters.mapAsJavaMapConverter(zkUtils.getPartitionsForTopics(JavaConverters.asScalaIteratorConverter(l.iterator()).asScala().toSeq())).asJava(); - if (tpMap != null) { - ArrayList partitionLists = new ArrayList<>(JavaConverters.seqAsJavaListConverter(tpMap.get(t)).asJava()); - tpList = partitionLists.stream().map(p -> new TopicAndPartition(t, (Integer) p)).collect(toList()); - } - return tpList; + if (type == null || type == ConsumerType.NEW) { + Set newCGList = listAllNewConsumerGroups(); + if (newCGList.size() != 0) { + result.put("new", newCGList); + } } - private Properties getTopicPropsFromZk(String topic) { - return AdminUtils.fetchEntityConfig(zkUtils, ConfigType.Topic(), topic); + return result; + } + + public Map> listConsumerGroupsByTopic( + @TopicExistConstraint String topic, ConsumerType type) { + Map> result = new HashMap<>(); + + if (type == null || type == ConsumerType.OLD) { + Set oldCGList = null; + try { + oldCGList = listOldConsumerGroupsByTopic(topic); + } catch (Exception e) { + e.printStackTrace(); + } + if (oldCGList.size() != 0) { + result.put("old", oldCGList); + } } + if (type == null || type == ConsumerType.NEW) { + Set newCGList = listNewConsumerGroupsByTopic(topic); - private long getOffsets(Node leader, String topic, int partitionId, long time) { - TopicAndPartition topicAndPartition = new TopicAndPartition(topic, partitionId); - - SimpleConsumer consumer = new SimpleConsumer( - leader.host(), - leader.port(), - 10000, - 1024, - "Kafka-zk-simpleconsumer" - ); - - PartitionOffsetRequestInfo partitionOffsetRequestInfo = new PartitionOffsetRequestInfo(time, 10000); - OffsetRequest offsetRequest = new OffsetRequest(ImmutableMap.of(topicAndPartition, partitionOffsetRequestInfo), kafka.api.OffsetRequest.CurrentVersion(), consumer.clientId()); - OffsetResponse offsetResponse = consumer.getOffsetsBefore(offsetRequest); - - if (offsetResponse.hasError()) { - short errorCode = offsetResponse.errorCode(topic, partitionId); - log.warn(format("Offset response has error: %d", errorCode)); - throw new ApiException("could not fetch data from Kafka, error code is '" + errorCode + "'Exception Message:" + offsetResponse.toString()); - } - - long[] offsets = offsetResponse.offsets(topic, partitionId); - consumer.close(); - return offsets[0]; + if (newCGList.size() != 0) { + result.put("new", newCGList); + } } - private long getOffsets(PartitionInfo partitionInfo, long time) { - return getOffsets(partitionInfo.leader(), partitionInfo.topic(), partitionInfo.partition(), time); + return result; + } + + private List getTopicPartitions(String t) { + List tpList = new ArrayList<>(); + List l = Arrays.asList(t); + java.util.Map> tpMap = + JavaConverters.mapAsJavaMapConverter( + zkUtils.getPartitionsForTopics( + JavaConverters.asScalaIteratorConverter(l.iterator()).asScala().toSeq())) + .asJava(); + if (tpMap != null) { + ArrayList partitionLists = + new ArrayList<>(JavaConverters.seqAsJavaListConverter(tpMap.get(t)).asJava()); + tpList = + partitionLists.stream().map(p -> new TopicAndPartition(t, (Integer) p)).collect(toList()); } - - public long getBeginningOffset(String topic, int partitionId) { - return getOffsets(kafkaUtils.getLeader(topic, partitionId), topic, partitionId, kafka.api.OffsetRequest.EarliestTime()); + return tpList; + } + + private Properties getTopicPropsFromZk(String topic) { + return AdminUtils.fetchEntityConfig(zkUtils, ConfigType.Topic(), topic); + } + + private long getOffsets(Node leader, String topic, int partitionId, long time) { + TopicAndPartition 
topicAndPartition = new TopicAndPartition(topic, partitionId); + + SimpleConsumer consumer = + new SimpleConsumer(leader.host(), leader.port(), 10000, 1024, "Kafka-zk-simpleconsumer"); + + PartitionOffsetRequestInfo partitionOffsetRequestInfo = + new PartitionOffsetRequestInfo(time, 10000); + OffsetRequest offsetRequest = + new OffsetRequest( + ImmutableMap.of(topicAndPartition, partitionOffsetRequestInfo), + kafka.api.OffsetRequest.CurrentVersion(), + consumer.clientId()); + OffsetResponse offsetResponse = consumer.getOffsetsBefore(offsetRequest); + + if (offsetResponse.hasError()) { + short errorCode = offsetResponse.errorCode(topic, partitionId); + log.warn(format("Offset response has error: %d", errorCode)); + throw new ApiException( + "could not fetch data from Kafka, error code is '" + + errorCode + + "'Exception Message:" + + offsetResponse.toString()); } - public long getEndOffset(String topic, int partitionId) { - return getOffsets(kafkaUtils.getLeader(topic, partitionId), topic, partitionId, kafka.api.OffsetRequest.LatestTime()); + long[] offsets = offsetResponse.offsets(topic, partitionId); + consumer.close(); + return offsets[0]; + } + + private long getOffsets(PartitionInfo partitionInfo, long time) { + return getOffsets( + partitionInfo.leader(), partitionInfo.topic(), partitionInfo.partition(), time); + } + + public long getBeginningOffset(String topic, int partitionId) { + return getOffsets( + kafkaUtils.getLeader(topic, partitionId), + topic, + partitionId, + kafka.api.OffsetRequest.EarliestTime()); + } + + public long getEndOffset(String topic, int partitionId) { + return getOffsets( + kafkaUtils.getLeader(topic, partitionId), + topic, + partitionId, + kafka.api.OffsetRequest.LatestTime()); + } + + private long getBeginningOffset(Node leader, String topic, int partitionId) { + return getOffsets(leader, topic, partitionId, kafka.api.OffsetRequest.EarliestTime()); + } + + private long getEndOffset(Node leader, String topic, int partitionId) { + return getOffsets(leader, topic, partitionId, kafka.api.OffsetRequest.LatestTime()); + } + + public boolean isOldConsumerGroup(String consumerGroup) { + return listAllOldConsumerGroups().contains(consumerGroup); + } + + public boolean isNewConsumerGroup(String consumerGroup) { + // Active Consumergroup or Dead ConsumerGroup is OK + return (listAllNewConsumerGroups().contains(consumerGroup)); + } + + public Set listTopicsByCG(String consumerGroup, ConsumerType type) { + Set topicList = new HashSet<>(); + + if (type == null) { + throw new ApiException("Unknown Type " + type); } - private long getBeginningOffset(Node leader, String topic, int partitionId) { - return getOffsets(leader, topic, partitionId, kafka.api.OffsetRequest.EarliestTime()); - } + if (type == ConsumerType.OLD) { + if (!isOldConsumerGroup(consumerGroup)) { + throw new RuntimeException(consumerGroup + " non-exist"); + } + + topicList = + new HashSet<>( + CollectionConvertor.seqConvertJavaList( + zkUtils.getTopicsByConsumerGroup(consumerGroup))); + } else if (type == ConsumerType.NEW) { + if (!isNewConsumerGroup(consumerGroup)) { + throw new RuntimeException(consumerGroup + " non-exist!"); + } + + AdminClient adminClient = kafkaUtils.createAdminClient(); + + List consumerSummaryList = + CollectionConvertor.listConvertJavaList(adminClient.describeConsumerGroup(consumerGroup)); + // Nothing about this consumer group obtained, return an empty map directly + adminClient.close(); + + if (isConsumerGroupActive(consumerGroup, ConsumerType.NEW) + && consumerSummaryList.size() 
!= 0) { + + // Get topic list and filter if topic is set + topicList.addAll( + consumerSummaryList + .stream() + .flatMap(cs -> CollectionConvertor.listConvertJavaList(cs.assignment()).stream()) + .map(tp -> tp.topic()) + .distinct() + .collect(toList())); + } + + if (consumerSummaryList.size() == 0) { // PENDING Consumer Group + Map storageMap = storage.get(consumerGroup); + if (storageMap == null) { + return null; + } - private long getEndOffset(Node leader, String topic, int partitionId) { - return getOffsets(leader, topic, partitionId, kafka.api.OffsetRequest.LatestTime()); + // Fetch the topics involved by consumer. And filter it by topic name + topicList.addAll( + storageMap + .entrySet() + .stream() + .map(e -> e.getKey().topicPartition().topic()) + .distinct() + .collect(toList())); + } + } else { + throw new ApiException("Unknown Type " + type); } + return topicList; + } - public boolean isOldConsumerGroup(String consumerGroup) { - return listAllOldConsumerGroups().contains(consumerGroup); + public Map> describeConsumerGroup( + String consumerGroup, ConsumerType type) { + Map> result = new HashMap<>(); + Set topicList = listTopicsByCG(consumerGroup, type); + if (topicList == null) { + // Return empty result + return result; } - - public boolean isNewConsumerGroup(String consumerGroup) { - //Active Consumergroup or Dead ConsumerGroup is OK - return (listAllNewConsumerGroups().contains(consumerGroup)); + if (type == ConsumerType.NEW) { + for (String topic : topicList) { + result.put(topic, describeNewCGByTopic(consumerGroup, topic)); + } + + } else if (type == ConsumerType.OLD) { + for (String topic : topicList) { + result.put(topic, describeOldCGByTopic(consumerGroup, topic)); + } } - - public Set listTopicsByCG(String consumerGroup, ConsumerType type) { - Set topicList = new HashSet<>(); - - if (type == null) { - throw new ApiException("Unknown Type " + type); - } - - if (type == ConsumerType.OLD) { - if (!isOldConsumerGroup(consumerGroup)) { - throw new RuntimeException(consumerGroup + " non-exist"); - } - - topicList = new HashSet<>( - CollectionConvertor.seqConvertJavaList(zkUtils.getTopicsByConsumerGroup(consumerGroup)) - ); - } else if (type == ConsumerType.NEW) { - if (!isNewConsumerGroup(consumerGroup)) { - throw new RuntimeException(consumerGroup + " non-exist!"); - } - - AdminClient adminClient = kafkaUtils.createAdminClient(); - - List consumerSummaryList = - CollectionConvertor.listConvertJavaList(adminClient.describeConsumerGroup(consumerGroup)); - //Nothing about this consumer group obtained, return an empty map directly - adminClient.close(); - - if (isConsumerGroupActive(consumerGroup, ConsumerType.NEW) && - consumerSummaryList.size() != 0) { - - //Get topic list and filter if topic is set - topicList.addAll(consumerSummaryList.stream().flatMap( - cs -> CollectionConvertor.listConvertJavaList(cs.assignment()).stream()) - .map(tp -> tp.topic()).distinct() - .collect(toList())); - } - - if (consumerSummaryList.size() == 0) { //PENDING Consumer Group - Map storageMap = storage.get(consumerGroup); - if (storageMap == null) { - return null; - } - - //Fetch the topics involved by consumer. 
And filter it by topic name - topicList.addAll(storageMap.entrySet().stream() - .map(e -> e.getKey().topicPartition().topic()).distinct() - .collect(toList())); - } - } else { - throw new ApiException("Unknown Type " + type); - } - - return topicList; - + return result; + } + + public Map countPartition(String topic) { + KafkaConsumer consumer = kafkaUtils.createNewConsumer(); + List piList = consumer.partitionsFor(topic); + Map result = + piList + .stream() + .flatMap(pi -> Arrays.stream(pi.replicas())) + .map(node -> node.id()) + .collect(Collectors.groupingBy(Function.identity(), Collectors.counting())); + + consumer.close(); + + return result; + } + + private boolean isConsumerGroupActive(String consumerGroup, ConsumerType type) { + if (type == ConsumerType.NEW) { + AdminClient adminClient = kafkaUtils.createAdminClient(); + boolean isActive = + CollectionConvertor.seqConvertJavaList(adminClient.listAllConsumerGroupsFlattened()) + .stream() + .map(GroupOverview::groupId) + .filter(c -> c.equals(consumerGroup)) + .count() + == 1; + adminClient.close(); + return isActive; + } else if (type == ConsumerType.OLD) { + return AdminUtils.isConsumerGroupActive(zookeeperUtils.getZkUtils(), consumerGroup); + } else { + throw new ApiException("Unknown type " + type); } - - public Map> describeConsumerGroup(String consumerGroup, ConsumerType type) { - Map> result = new HashMap<>(); - Set topicList = listTopicsByCG(consumerGroup, type); - if (topicList == null) { - //Return empty result - return result; - } - if (type == ConsumerType.NEW) { - for (String topic : topicList) { - result.put(topic, describeNewCGByTopic(consumerGroup, topic)); - } - - } else if (type == ConsumerType.OLD) { - for (String topic : topicList) { - result.put(topic, describeOldCGByTopic(consumerGroup, topic)); - } - } - - return result; + } + + public HealthCheckResult healthCheck() { + String healthCheckTopic = kafkaConfig.getHealthCheckTopic(); + HealthCheckResult healthCheckResult = new HealthCheckResult(); + KafkaProducer producer = kafkaUtils.createProducer(); + KafkaConsumer consumer = kafkaUtils.createNewConsumerByTopic(healthCheckTopic); + + boolean healthCheckTopicExist = existTopic(healthCheckTopic); + log.info("HealthCheckTopic:" + healthCheckTopic + " existed:" + healthCheckTopicExist); + if (!healthCheckTopicExist) { + healthCheckResult.setStatus("unknown"); + healthCheckResult.setMsg( + "HealthCheckTopic: " + + healthCheckTopic + + " Non-Exist. Please create it before doing health check."); + return healthCheckResult; } - public Map countPartition(String topic) { - KafkaConsumer consumer = kafkaUtils.createNewConsumer(); - List piList = consumer.partitionsFor(topic); - Map result = piList.stream().flatMap(pi -> Arrays.stream(pi.replicas())) - .map(node -> node.id()).collect(Collectors.groupingBy( - Function.identity(), Collectors.counting() - )); - - consumer.close(); - - return result; + String message = "healthcheck_" + System.currentTimeMillis(); + ProducerRecord record = new ProducerRecord(healthCheckTopic, null, message); + log.info("Generate message:" + message); + try { + RecordMetadata recordMetadata = (RecordMetadata) producer.send(record).get(); + log.info("Message:" + message + " has been sent to Partition:" + recordMetadata.partition()); + } catch (Exception e) { + healthCheckResult.setStatus("error"); + healthCheckResult.setMsg( + "Health Check: Produce Message Failure. 
Exception: " + e.getMessage()); + log.error("Health Check: Produce Message Failure.", e); + return healthCheckResult; + } finally { + producer.close(); } - private boolean isConsumerGroupActive(String consumerGroup, ConsumerType type) { - if (type == ConsumerType.NEW) { - AdminClient adminClient = kafkaUtils.createAdminClient(); - boolean isActive = CollectionConvertor.seqConvertJavaList(adminClient.listAllConsumerGroupsFlattened()).stream() - .map(GroupOverview::groupId).filter(c -> c.equals(consumerGroup)).count() == 1; - adminClient.close(); - return isActive; - } else if (type == ConsumerType.OLD) { - return AdminUtils.isConsumerGroupActive(zookeeperUtils.getZkUtils(), consumerGroup); - } else { - throw new ApiException("Unknown type " + type); + int retries = 30; + int noRecordsCount = 0; + while (true) { + final ConsumerRecords consumerRecords = consumer.poll(1000); + if (consumerRecords.count() == 0) { + noRecordsCount++; + if (noRecordsCount > retries) break; + else continue; + } + Iterator> iterator = consumerRecords.iterator(); + while (iterator.hasNext()) { + ConsumerRecord msg = iterator.next(); + log.info("Health Check: Fetch Message " + msg.value() + ", offset:" + msg.offset()); + if (msg.value().equals(message)) { + healthCheckResult.setStatus("ok"); + healthCheckResult.setMsg(message); + return healthCheckResult; } + } + consumer.commitAsync(); + } + consumer.close(); + + if (healthCheckResult.getStatus() == null) { + healthCheckResult.setStatus("error"); + healthCheckResult.setMsg( + "Health Check: Consume Message Failure. Consumer can't fetch the message."); } + return healthCheckResult; + } } diff --git a/src/main/java/org/gnuhpc/bigdata/service/UserService.java b/src/main/java/org/gnuhpc/bigdata/service/UserService.java new file mode 100644 index 0000000..6f4fe64 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/service/UserService.java @@ -0,0 +1,108 @@ +package org.gnuhpc.bigdata.service; + +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j; +import org.gnuhpc.bigdata.config.WebSecurityConfig; +import org.gnuhpc.bigdata.constant.GeneralResponseState; +import org.gnuhpc.bigdata.model.GeneralResponse; +import org.gnuhpc.bigdata.model.User; +import org.gnuhpc.bigdata.utils.CommonUtils; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.util.*; + +@Getter +@Setter +@Log4j +@Service +public class UserService { + private HashMap accounts; + + public List listUser() { + List userList = new ArrayList<>(); + try { + accounts = CommonUtils.yamlParse(WebSecurityConfig.SECURITY_FILE_PATH); + accounts.forEach((username, value)->{ + userList.add((String)username); + }); + } catch (IOException ioException) { + log.error("Failed to get user list. Reason : " + ioException.getLocalizedMessage()); + } + + return userList; + } + + public GeneralResponse addUser(User user) { + String username = user.getUsername(); + try { + boolean exist = checkUserExist(username); + if (!exist) { + return saveUserInfo(user); + } else { + log.info("Failed to add user. Reason : User " + username + " already exists."); + return new GeneralResponse(GeneralResponseState.failure, "Failed to add user. Reason : User " + username + " already exists."); + } + } catch (IOException ioException) { + log.error("Failed to add user " + username + ". Reason : " + ioException.getLocalizedMessage()); + return new GeneralResponse(GeneralResponseState.failure, + "Failed to add user " + username + ". 
Reason : " + ioException.getLocalizedMessage()); + } + } + + public GeneralResponse modifyUser(User user) { + String username = user.getUsername(); + try { + boolean exist = checkUserExist(username); + if (exist) { + return saveUserInfo(user); + } else { + log.info("Failed to modify user. Reason : User " + username + " does not exist."); + return new GeneralResponse(GeneralResponseState.failure, "Failed to modify user. Reason : User " + username + " does not exist."); + } + } catch (IOException ioException) { + log.error("Failed to modify user " + username + ". Reason : " + ioException.getLocalizedMessage()); + return new GeneralResponse(GeneralResponseState.failure, + "Failed to modify user " + username + ". Reason : " + ioException.getLocalizedMessage()); + } + } + + public GeneralResponse delUser(String username) { + try { + boolean exist = checkUserExist(username); + if (exist) { + accounts.remove(username); + CommonUtils.yamlWrite(WebSecurityConfig.SECURITY_FILE_PATH, accounts); + return new GeneralResponse(GeneralResponseState.success, "Delete user " + username + " successfully."); + } else { + log.info("Failed to delete user. Reason : User " + username + " does not exist."); + return new GeneralResponse(GeneralResponseState.failure, "Failed to delete user. Reason : User " + username + " does not exist."); + } + } catch (IOException ioException) { + log.error("Failed to delete user " + username + ". Reason : " + ioException.getLocalizedMessage()); + return new GeneralResponse(GeneralResponseState.failure, + "Failed to delete user " + username + ". Reason : " + ioException.getLocalizedMessage()); + } + } + + public boolean checkUserExist(String username) throws IOException { + accounts = CommonUtils.yamlParse(WebSecurityConfig.SECURITY_FILE_PATH); + if (accounts.containsKey(username)) { + return true; + } + return false; + } + + public GeneralResponse saveUserInfo(User user) throws IOException { + String username = user.getUsername(); + String encodedPassword = CommonUtils.encode(user.getPassword()); + HashMap userInfo = new HashMap<>(); + + userInfo.put("password", encodedPassword); + userInfo.put("role", user.getRole()); + accounts.put(username, userInfo); + CommonUtils.yamlWrite(WebSecurityConfig.SECURITY_FILE_PATH, accounts); + return new GeneralResponse(GeneralResponseState.success, "Save user " + username + " info successfully."); + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/service/ZookeeperService.java b/src/main/java/org/gnuhpc/bigdata/service/ZookeeperService.java index 67e12f3..178cc0f 100644 --- a/src/main/java/org/gnuhpc/bigdata/service/ZookeeperService.java +++ b/src/main/java/org/gnuhpc/bigdata/service/ZookeeperService.java @@ -1,8 +1,11 @@ package org.gnuhpc.bigdata.service; import com.google.common.net.HostAndPort; +import java.util.HashMap; +import java.util.List; import lombok.extern.log4j.Log4j; import org.gnuhpc.bigdata.constant.ZkServerCommand; +import org.gnuhpc.bigdata.exception.ServiceNotAvailableException; import org.gnuhpc.bigdata.model.ZkServerEnvironment; import org.gnuhpc.bigdata.model.ZkServerStat; import org.gnuhpc.bigdata.utils.ZookeeperUtils; @@ -15,36 +18,39 @@ @Service @Log4j public class ZookeeperService { - @Autowired - private ZookeeperUtils zookeeperUtils; - public Map stat() { - return zookeeperUtils.getZookeeperConfig().getHostAndPort().stream() - .collect(Collectors.toMap( - hp -> hp, - hp -> zookeeperUtils.parseStatResult( - zookeeperUtils.executeCommand( - hp.getHostText(), - hp.getPort(), - ZkServerCommand.stat.toString() - ) - ) - 
)); - } + @Autowired + private ZookeeperUtils zookeeperUtils; - public Map environment() { - return zookeeperUtils.getZookeeperConfig().getHostAndPort().stream() - .collect(Collectors.toMap( - hp -> hp, - hp -> zookeeperUtils.parseEnvResult( - zookeeperUtils.executeCommand( - hp.getHostText(), - hp.getPort(), - ZkServerCommand.envi.toString() - ) - ) - )); + public Map stat() { + List hostAndPortList = zookeeperUtils.getZookeeperConfig().getHostAndPort(); + Map result = new HashMap<>(); + for (int i = 0; i < hostAndPortList.size(); i++) { + HostAndPort hp = hostAndPortList.get(i); + try { + result.put(hp, zookeeperUtils.parseStatResult(zookeeperUtils + .executeCommand(hp.getHostText(), hp.getPort(), ZkServerCommand.stat.toString()))); + } catch (ServiceNotAvailableException serviceNotAvailableException) { + log.warn("Execute " + ZkServerCommand.stat.toString() + " command failed. Exception: " + + serviceNotAvailableException); + } } + return result; + } - + public Map environment() { + List hostAndPortList = zookeeperUtils.getZookeeperConfig().getHostAndPort(); + Map result = new HashMap<>(); + for (int i = 0; i < hostAndPortList.size(); i++) { + HostAndPort hp = hostAndPortList.get(i); + try { + result.put(hp, zookeeperUtils.parseEnvResult(zookeeperUtils + .executeCommand(hp.getHostText(), hp.getPort(), ZkServerCommand.envi.toString()))); + } catch (ServiceNotAvailableException serviceNotAvailableException) { + log.warn("Execute " + ZkServerCommand.envi.toString() + " command failed. Exception: " + + serviceNotAvailableException); + } + } + return result; + } } diff --git a/src/main/java/org/gnuhpc/bigdata/utils/CommonUtils.java b/src/main/java/org/gnuhpc/bigdata/utils/CommonUtils.java new file mode 100644 index 0000000..e7b9dc8 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/utils/CommonUtils.java @@ -0,0 +1,66 @@ +package org.gnuhpc.bigdata.utils; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; +import lombok.Getter; +import lombok.Setter; +import lombok.extern.log4j.Log4j; +import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder; + +import java.io.File; +import java.io.IOException; +import java.util.HashMap; + +@Log4j +@Getter +@Setter +public class CommonUtils { + public static final String PROJECT_ROOT_FOLDER = CommonUtils.getProjectRootPath(); + + public static String encode(CharSequence rawPassword) { + return new BCryptPasswordEncoder().encode(rawPassword); + } + + public static String getProjectRootPath() { + String workingDir = System.getProperty("user.dir"); + log.info("Current working directory : " + workingDir); + return workingDir; + } + + public static HashMap yamlParse(String filePath) throws IOException { + ObjectMapper mapperForYAML = new ObjectMapper(new YAMLFactory()); + File file = new File(filePath); + HashMap yamlHash = new HashMap<>(); + yamlHash = mapperForYAML.readValue(file, yamlHash.getClass()); + + return yamlHash; + } + + public static HashMap yamlParse(File file) throws IOException { + ObjectMapper mapperForYAML = new ObjectMapper(new YAMLFactory()); + HashMap yamlHash = new HashMap<>(); + yamlHash = mapperForYAML.readValue(file, yamlHash.getClass()); + + return yamlHash; + } + + public static void yamlWrite(String filePath, Object object) throws IOException { + File file = new File(filePath); + ObjectMapper mapperForYAML = new ObjectMapper(new YAMLFactory()); + mapperForYAML.writeValue(file, object); + } + + public static void yamlWrite(File file, Object object) throws 
IOException { + ObjectMapper mapperForYAML = new ObjectMapper(new YAMLFactory()); + mapperForYAML.writeValue(file, object); + } + + public static void main(String[] args) throws IOException { + /* + String rawPassword = "admin"; + String encodedPassword = CommonUtils.encode(rawPassword); + System.out.println("rawPassword:" + rawPassword + ", encodedPassword:" + encodedPassword); + System.out.println("workingDir:" + CommonUtils.PROJECT_ROOT_FOLDER); + */ + } +} diff --git a/src/main/java/org/gnuhpc/bigdata/utils/KafkaUtils.java b/src/main/java/org/gnuhpc/bigdata/utils/KafkaUtils.java index 8b73604..43c6ee2 100644 --- a/src/main/java/org/gnuhpc/bigdata/utils/KafkaUtils.java +++ b/src/main/java/org/gnuhpc/bigdata/utils/KafkaUtils.java @@ -9,6 +9,7 @@ import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.common.Node; import org.apache.kafka.common.PartitionInfo; import org.apache.kafka.common.errors.ApiException; @@ -20,6 +21,7 @@ import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Configuration; +import java.util.Collections; import java.util.List; import java.util.Properties; @@ -45,10 +47,10 @@ public class KafkaUtils { public void init(){ prop = new Properties(); - prop.setProperty("bootstrap.servers",kafkaConfig.getBrokers()); - prop.setProperty("key.serializer", + prop.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaConfig.getBrokers()); + prop.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); - prop.setProperty("value.serializer", + prop.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); producer = new KafkaProducer(prop); log.info("Kafka initing..."); @@ -71,7 +73,8 @@ public KafkaConsumer createNewConsumer(String consumerGroup) { properties.put(ConsumerConfig.GROUP_ID_CONFIG, consumerGroup); properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false"); properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000"); - properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG,"100000000"); + properties.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, "100000000"); + properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "5"); properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getCanonicalName()); properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, @@ -80,6 +83,34 @@ public KafkaConsumer createNewConsumer(String consumerGroup) { return new KafkaConsumer(properties); } + public KafkaConsumer createNewConsumerByTopic(String topic){ + Properties properties = new Properties(); + properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, getKafkaConfig().getBrokers()); + properties.put(ConsumerConfig.GROUP_ID_CONFIG, DEFAULTCP); + properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, + StringDeserializer.class.getCanonicalName()); + properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, + StringDeserializer.class.getCanonicalName()); + KafkaConsumer kafkaConsumer = new KafkaConsumer(properties); + kafkaConsumer.subscribe(Collections.singletonList(topic)); + + return kafkaConsumer; + } + + public KafkaProducer createProducer() { + Properties prop = new Properties(); + prop.setProperty(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, 
kafkaConfig.getBrokers()); + prop.setProperty(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); + prop.setProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); + prop.setProperty(ProducerConfig.RETRIES_CONFIG, "3"); + prop.setProperty(ProducerConfig.REQUEST_TIMEOUT_MS_CONFIG, "10000"); + producer = new KafkaProducer(prop); + + return producer; + } + public Node getLeader(String topic, int partitionId) { KafkaConsumer consumer = createNewConsumer(DEFAULTCP); List tmList = consumer.partitionsFor(topic); diff --git a/src/main/java/org/gnuhpc/bigdata/utils/ZookeeperUtils.java b/src/main/java/org/gnuhpc/bigdata/utils/ZookeeperUtils.java index 433e78f..f4189fb 100644 --- a/src/main/java/org/gnuhpc/bigdata/utils/ZookeeperUtils.java +++ b/src/main/java/org/gnuhpc/bigdata/utils/ZookeeperUtils.java @@ -1,5 +1,6 @@ package org.gnuhpc.bigdata.utils; +import com.google.common.base.Charsets; import kafka.utils.ZKStringSerializer$; import kafka.utils.ZkUtils; import lombok.Getter; @@ -12,23 +13,24 @@ import org.apache.curator.RetryPolicy; import org.apache.curator.framework.CuratorFramework; import org.apache.curator.framework.CuratorFrameworkFactory; +import org.apache.curator.framework.api.GetDataBuilder; import org.apache.curator.retry.ExponentialBackoffRetry; +import org.apache.curator.utils.ZKPaths; +import org.apache.zookeeper.data.Stat; import org.gnuhpc.bigdata.config.ZookeeperConfig; import org.gnuhpc.bigdata.constant.ZkServerMode; import org.gnuhpc.bigdata.exception.ServiceNotAvailableException; import org.gnuhpc.bigdata.model.ZkServerClient; import org.gnuhpc.bigdata.model.ZkServerEnvironment; import org.gnuhpc.bigdata.model.ZkServerStat; +import org.gnuhpc.bigdata.validator.ZKNodePathExistConstraint; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.context.annotation.Configuration; +import org.springframework.validation.annotation.Validated; import java.io.IOException; import java.net.InetAddress; import java.net.Socket; -import java.net.UnknownHostException; -import java.util.Iterator; -import java.util.LinkedList; -import java.util.List; +import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; @@ -39,6 +41,7 @@ @Log4j @Setter @Getter +@Validated public class ZookeeperUtils { //For Stat Command parse @@ -233,5 +236,47 @@ public ZkServerEnvironment parseEnvResult(final List result) { return environment; } + public List lsPath(@ZKNodePathExistConstraint String path) { + try { + return curatorClient.getChildren().forPath(path); + } catch (Exception e) { + log.error("ls path fail! path: " + path + ", error: " + e); + return null; + } + } + + public Map getNodeData(@ZKNodePathExistConstraint String path) { + Map map = new HashMap<>(); + + try { + List childrens = curatorClient.getChildren().forPath(path); + GetDataBuilder dataBuilder = curatorClient.getData(); + if (childrens != null && childrens.size() > 0) { + for (int i = 0; i < childrens.size(); i++) { + String child = childrens.get(i); + String childPath = ZKPaths.makePath(path, child); + byte[] bytes = dataBuilder.forPath(childPath); + map.put(childPath, (bytes!=null)?(new String(bytes, Charsets.UTF_8)):(null)); + } + } else { + byte[] bytes = dataBuilder.forPath(path); + map.put(path, (bytes!=null)?(new String(bytes, Charsets.UTF_8)):(null)); + } + + } catch (Exception e) { + log.error("get node data fail! 
path: " + path + ", error: {}" + e); + } + + return map; + } + + public Stat getNodePathStat(String path) { + try { + return curatorClient.checkExists().forPath(path); + } catch (Exception e) { + log.error("get node data fail! path: " + path + ", error: {}" + e); + } + return null; + } } diff --git a/src/main/java/org/gnuhpc/bigdata/validator/ZKNodePathExistConstraint.java b/src/main/java/org/gnuhpc/bigdata/validator/ZKNodePathExistConstraint.java new file mode 100644 index 0000000..910b173 --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/validator/ZKNodePathExistConstraint.java @@ -0,0 +1,19 @@ +package org.gnuhpc.bigdata.validator; + +import javax.validation.Constraint; +import javax.validation.Payload; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +@Constraint(validatedBy = ZKNodePathExistValidator.class) +@Target( { ElementType.METHOD, ElementType.FIELD, ElementType.PARAMETER}) +@Retention(RetentionPolicy.RUNTIME) +public @interface ZKNodePathExistConstraint { + String message() default "Non-exist ZooKeeper Node path!"; + + Class[] groups() default {}; + + Class[] payload() default {}; +} diff --git a/src/main/java/org/gnuhpc/bigdata/validator/ZKNodePathExistValidator.java b/src/main/java/org/gnuhpc/bigdata/validator/ZKNodePathExistValidator.java new file mode 100644 index 0000000..dcf1d5f --- /dev/null +++ b/src/main/java/org/gnuhpc/bigdata/validator/ZKNodePathExistValidator.java @@ -0,0 +1,19 @@ +package org.gnuhpc.bigdata.validator; + +import org.gnuhpc.bigdata.utils.ZookeeperUtils; +import org.springframework.beans.factory.annotation.Autowired; + +import javax.validation.ConstraintValidator; +import javax.validation.ConstraintValidatorContext; + +public class ZKNodePathExistValidator implements ConstraintValidator { + @Autowired + private ZookeeperUtils zookeeperUtils; + + public void initialize(ZKNodePathExistConstraint constraint) { + } + + public boolean isValid(String path, ConstraintValidatorContext context) { + return (zookeeperUtils.getNodePathStat(path)!=null); + } +} diff --git a/src/main/resources/application-dev.yml b/src/main/resources/application-dev.yml index 2a9db40..0c81b28 100644 --- a/src/main/resources/application-dev.yml +++ b/src/main/resources/application-dev.yml @@ -4,6 +4,8 @@ kafka: topic: "__consumer_offsets" partitions: 50 reset: true + healthcheck: + topic: "health" zookeeper: uris: DPFTMP06:2181,DPFTMP07:2181,DPFTMP08:2181,DPFTMP09:2181,DPFTMP10:2181 diff --git a/src/main/resources/application-tina.yml b/src/main/resources/application-tina.yml new file mode 100644 index 0000000..1d4fefd --- /dev/null +++ b/src/main/resources/application-tina.yml @@ -0,0 +1,41 @@ +kafka: + brokers: localhost:19092,localhost:19093,localhost:19095 + offset: + topic: "__consumer_offsets" + partitions: 50 + reset: true + healthcheck: + topic: "health" + +zookeeper: + uris: 127.0.0.1:2183,127.0.0.1:2182 + +jmx: + kafka: + jmxurl: localhost:19999,localhost:29999,localhost:39999 + zookeeper: + jmxurl: localhost:49999 + filterTemplate: JMXFilterTemplate + +server: + port: 8121 + context-path: / + debug: true + security: + check: true + checkInitDelay: 30 + checkSecurityInterval: 5 + +spring: + kafka: + bootstrap-servers: ${kafka.brokers} + consumer: + group-id: "kafka-zk-rest" + mvc: + throw-exception-if-no-handler-found: true + output: + ansi: + enabled: detect + aop: + auto: true + diff --git a/src/main/resources/application.yml 
b/src/main/resources/application.yml index 79a279c..12f75cf 100644 --- a/src/main/resources/application.yml +++ b/src/main/resources/application.yml @@ -1,6 +1,6 @@ spring: profiles: - active: home + active: tina server: error: diff --git a/src/main/resources/bin/start.sh b/src/main/resources/bin/start.sh index f16a439..f5f7769 100644 --- a/src/main/resources/bin/start.sh +++ b/src/main/resources/bin/start.sh @@ -11,4 +11,4 @@ echo $basedir chmod 755 ${basedir}/logs -java -Xms256m -Xmx512m -server -Xloggc:${basedir}/logs/gc.log -verbose:gc -XX:+PrintGCDetails -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${basedir}/logs -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=9888 -Dcom.sun.management.jmxremote.ssl=FALSE -Dcom.sun.management.jmxremote.authenticate=FALSE -Dspring.config.location=${basedir}/config/application.yml -cp $basedir:$basedir/conf:$basedir/libs/* -Dbasedir=${basedir} -jar ${basedir}/lib/kafka-rest-springboot-*-release.jar +java -Xms256m -Xmx512m -server -Xloggc:${basedir}/logs/gc.log -verbose:gc -XX:+PrintGCDetails -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${basedir}/logs -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=9888 -Dcom.sun.management.jmxremote.ssl=FALSE -Dcom.sun.management.jmxremote.authenticate=FALSE -Dsun.rmi.transport.proxy.connectTimeout=5000 -Dsun.rmi.transport.tcp.responseTimeout=5000 -Dspring.config.location=${basedir}/config/application.yml -cp $basedir:$basedir/conf:$basedir/libs/* -Dbasedir=${basedir} -jar ${basedir}/lib/kafka-rest-springboot-*-release.jar
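
Editor's note on the resetOffset() change earlier in this patch: for new-style consumer groups it follows the standard assign/seek/commit pattern of the 0.10.x Kafka consumer. A minimal standalone sketch of that pattern is below. The broker address localhost:9092, topic "test", partition 0, and group "my-group" are illustrative assumptions, not values from this patch; the group must be inactive, just as the patch enforces via isConsumerGroupActive().

import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

public class OffsetResetSketch {
  public static void main(String[] args) {
    Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed broker
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "my-group");                // assumed inactive group
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false");
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringDeserializer");
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringDeserializer");

    TopicPartition tp = new TopicPartition("test", 0);
    try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
      // Manual assignment avoids triggering a group rebalance while seeking.
      consumer.assign(Collections.singletonList(tp));
      consumer.poll(100); // initialize the fetch position, as the patch does before seeking
      consumer.seekToBeginning(Collections.singletonList(tp)); // or consumer.seek(tp, someOffset)
      consumer.commitSync(); // persist the new position for the group
    }
  }
}

Seeking only moves the in-memory position; commitSync() is what makes the reset durable for the group, which is why the patch calls it after seek().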