From 5a092ab4fb57d724f9dd0371109090fbf1d974dc Mon Sep 17 00:00:00 2001
From: Christophe PARAGEAUD <cparageaud@ippon.fr>
Date: Fri, 30 Mar 2018 17:19:34 +0200
Subject: [PATCH] Update README.md

---
 README.md | 33 ++++++++++++++-------------------
 1 file changed, 14 insertions(+), 19 deletions(-)

diff --git a/README.md b/README.md
index ba169ef..c1e4943 100644
--- a/README.md
+++ b/README.md
@@ -17,15 +17,15 @@ Mesure latency between record insertion in Kafka and Streaming processing using
 - fr.ippon.kafkaLatency.AlimKafka : Kafka Producer (100 000 messages)
 
 ### Consummer
- fr.ippon.kafkaLatency.streaming.KafkaSpark : Spark streaming consummer
- fr.ippon.kafkaLatency.streaming.KafkaStructuredSpark : Spark Structured streaming consummer
- fr.ippon.kafkaLatency.streaming.KafkaContinuousSpark : Spark Continous processing consummer
- fr.ippon.kafkaLatency.streaming.KafkaStreaming : Kafka Streams consummer
- fr.ippon.kafkaLatency.streaming.KafkaFlink : Flink consummer
+- fr.ippon.kafkaLatency.streaming.KafkaSpark : Spark streaming consumer
+- fr.ippon.kafkaLatency.streaming.KafkaStructuredSpark : Spark Structured streaming consumer
+- fr.ippon.kafkaLatency.streaming.KafkaContinuousSpark : Spark Continuous processing consumer
+- fr.ippon.kafkaLatency.streaming.KafkaStreaming : Kafka Streams consumer
+- fr.ippon.kafkaLatency.streaming.KafkaFlink : Flink consumer
 
 ### Utils
- fr.ippon.kafkaLatency.utils.UtilStats : Streaming statistics
- fr.ippon.kafkaLatency.utils.MetricsProducerReporter : Kafka Producer Metrics
+- fr.ippon.kafkaLatency.utils.UtilStats : Streaming statistics
+- fr.ippon.kafkaLatency.utils.MetricsProducerReporter : Kafka Producer Metrics
 
 ## Resources
 ### config.properties :
@@ -36,17 +36,12 @@ Mesure latency between record insertion in Kafka and Streaming processing using
     - bootstrap.servers: List of Kafka servers
 
  - Streaming
-    - stats.step=1000
-    - stats.total=100000
-    - max.poll.records=1000
-    - spark.continuous.interval=300
-    - spark.structured.interval=150
-    - spark.streaming.interval=50
+    - stats.step : Number of messages to group before calculating stats
+    - stats.total : Total number of messages
+    - max.poll.records : Maximum number of records to fetch from Kafka
+    - spark.continuous.interval : Trigger interval in Spark continuous processing
+    - spark.structured.interval : Trigger interval in Spark Structured Streaming
+    - spark.streaming.interval : Micro-batch interval
 
  - data source
-    - listPersonns.json
-
-
-
-
-
+    - listPersonns.json
\ No newline at end of file
-- 
GitLab