diff --git a/README.md b/README.md
index ba169ef4f9be9ba29291d72051f05a294b74e794..c1e4943a1a4e8dcadccd1741197e47e046d6bc2b 100644
--- a/README.md
+++ b/README.md
@@ -17,15 +17,15 @@ Mesure latency between record insertion in Kafka and Streaming processing using
 - fr.ippon.kafkaLatency.AlimKafka : Kafka Producer (100 000 messages)
 
 ### Consumer
- fr.ippon.kafkaLatency.streaming.KafkaSpark : Spark streaming consummer
- fr.ippon.kafkaLatency.streaming.KafkaStructuredSpark : Spark Structured streaming consummer
- fr.ippon.kafkaLatency.streaming.KafkaContinuousSpark : Spark Continous processing consummer
- fr.ippon.kafkaLatency.streaming.KafkaStreaming : Kafka Streams consummer
- fr.ippon.kafkaLatency.streaming.KafkaFlink : Flink consummer
+- fr.ippon.kafkaLatency.streaming.KafkaSpark : Spark Streaming consumer
+- fr.ippon.kafkaLatency.streaming.KafkaStructuredSpark : Spark Structured Streaming consumer
+- fr.ippon.kafkaLatency.streaming.KafkaContinuousSpark : Spark Continuous Processing consumer
+- fr.ippon.kafkaLatency.streaming.KafkaStreaming : Kafka Streams consumer
+- fr.ippon.kafkaLatency.streaming.KafkaFlink : Flink consumer
 
 ### Utils
- fr.ippon.kafkaLatency.utils.UtilStats : Streaming statistics
- fr.ippon.kafkaLatency.utils.MetricsProducerReporter : Kafka Producer Metrics
+- fr.ippon.kafkaLatency.utils.UtilStats : Streaming statistics
+- fr.ippon.kafkaLatency.utils.MetricsProducerReporter : Kafka Producer Metrics
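+
+For illustration only, here is a minimal plain-Kafka-consumer sketch of the measurement idea, assuming latency is taken as the consumer's wall-clock time minus the record's Kafka timestamp and that statistics are reported every `stats.step` messages. The broker address, topic name and deserializers are assumptions; this is not the project's actual code.
+
+```java
+import java.time.Duration;
+import java.util.Collections;
+import java.util.Properties;
+
+import org.apache.kafka.clients.consumer.ConsumerConfig;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
+import org.apache.kafka.clients.consumer.ConsumerRecords;
+import org.apache.kafka.clients.consumer.KafkaConsumer;
+import org.apache.kafka.common.serialization.StringDeserializer;
+
+public class LatencySketch {
+    public static void main(String[] args) {
+        Properties props = new Properties();
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092"); // assumed local broker
+        props.put(ConsumerConfig.GROUP_ID_CONFIG, "latency-sketch");
+        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
+        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, "1000"); // mirrors max.poll.records
+
+        long statsStep = 1000; // mirrors stats.step: report every 1000 messages
+        long count = 0;
+        long latencySum = 0;
+
+        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
+            consumer.subscribe(Collections.singletonList("latency-topic")); // assumed topic name
+            while (true) {
+                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
+                for (ConsumerRecord<String, String> record : records) {
+                    // record.timestamp() is the producer/broker timestamp attached to the record
+                    latencySum += System.currentTimeMillis() - record.timestamp();
+                    count++;
+                    if (count % statsStep == 0) {
+                        System.out.printf("avg latency: %d ms over last %d messages (total %d)%n",
+                                latencySum / statsStep, statsStep, count);
+                        latencySum = 0;
+                    }
+                }
+            }
+        }
+    }
+}
+```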
 
 ## Resources
 ### config.properties :
@@ -36,17 +36,12 @@ Mesure latency between record insertion in Kafka and Streaming processing using
     - bootstrap.servers: List of Kafka servers
 
  - Streaming
-    - stats.step=1000
-    - stats.total=100000
-    - max.poll.records=1000
-    - spark.continuous.interval=300
-    - spark.structured.interval=150
-    - spark.streaming.interval=50
+    - stats.step : Number of messages to group before computing statistics
+    - stats.total : Total number of messages
+    - max.poll.records : Maximum number of records fetched from Kafka per poll
+    - spark.continuous.interval : Trigger interval for Spark Continuous Processing
+    - spark.structured.interval : Trigger interval for Spark Structured Streaming
+    - spark.streaming.interval : Micro-batch interval for Spark Streaming
 
  - data source
-    - listPersonns.json
-
-
-
-
-
+    - listPersonns.json
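+
+For reference, a `config.properties` built from these keys might look like the sketch below; the numeric values are the defaults previously listed in this README, while the broker address is only an assumption:
+
+```properties
+# Kafka
+bootstrap.servers=localhost:9092
+
+# Streaming
+stats.step=1000
+stats.total=100000
+max.poll.records=1000
+spark.continuous.interval=300
+spark.structured.interval=150
+spark.streaming.interval=50
+```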