@@ -18,9 +18,12 @@
 
 use super::{CollectItemConsume, CollectItemProduce};
 use crate::reporter::{CollectItem, Report};
-pub use rdkafka::config::{ClientConfig as RDKafkaClientConfig, RDKafkaLogLevel};
-use rdkafka::producer::{FutureProducer, FutureRecord};
+use rdkafka::{
+    config::ClientConfig as RDKafkaClientConfig,
+    producer::{FutureProducer, FutureRecord},
+};
 use std::{
+    collections::HashMap,
     error,
     future::{Future, pending},
     pin::Pin,
@@ -48,6 +51,89 @@ pub enum Error {
     },
 }
 
+/// Log level for Kafka client.
+#[derive(Debug, Clone, Copy)]
+pub enum LogLevel {
+    /// Critical level.
+    Critical,
+    /// Error level.
+    Error,
+    /// Warning level.
+    Warning,
+    /// Notice level.
+    Notice,
+    /// Info level.
+    Info,
+    /// Debug level.
+    Debug,
+}
+
+impl From<LogLevel> for rdkafka::config::RDKafkaLogLevel {
+    fn from(level: LogLevel) -> Self {
+        match level {
+            LogLevel::Critical => rdkafka::config::RDKafkaLogLevel::Critical,
+            LogLevel::Error => rdkafka::config::RDKafkaLogLevel::Error,
+            LogLevel::Warning => rdkafka::config::RDKafkaLogLevel::Warning,
+            LogLevel::Notice => rdkafka::config::RDKafkaLogLevel::Notice,
+            LogLevel::Info => rdkafka::config::RDKafkaLogLevel::Info,
+            LogLevel::Debug => rdkafka::config::RDKafkaLogLevel::Debug,
+        }
+    }
+}
+
+/// Configuration for Kafka client.
+#[derive(Debug, Clone)]
+pub struct ClientConfig {
+    /// Configuration parameters as key-value pairs.
+    params: HashMap<String, String>,
+    /// Log level for the client.
+    log_level: Option<LogLevel>,
+}
+
+impl ClientConfig {
+    /// Create a new empty configuration.
+    pub fn new() -> Self {
+        Self {
+            params: HashMap::new(),
+            log_level: None,
+        }
+    }
+
+    /// Set a configuration parameter.
+    pub fn set<K, V>(&mut self, key: K, value: V) -> &mut Self
+    where
+        K: Into<String>,
+        V: Into<String>,
+    {
+        self.params.insert(key.into(), value.into());
+        self
+    }
+
+    /// Set log level.
+    pub fn set_log_level(&mut self, level: LogLevel) -> &mut Self {
+        self.log_level = Some(level);
+        self
+    }
+
+    /// Convert to rdkafka ClientConfig.
+    fn to_rdkafka_config(&self) -> RDKafkaClientConfig {
+        let mut config = RDKafkaClientConfig::new();
+        for (key, value) in &self.params {
+            config.set(key, value);
+        }
+        if let Some(log_level) = self.log_level {
+            config.set_log_level(log_level.into());
+        }
+        config
+    }
+}
+
+impl Default for ClientConfig {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 type DynErrHandler = dyn Fn(&str, &dyn error::Error) + Send + Sync + 'static;
 
 fn default_err_handle(message: &str, err: &dyn error::Error) {
@@ -71,14 +157,14 @@ pub struct KafkaReportBuilder<P, C> {
     state: Arc<State>,
     producer: Arc<P>,
     consumer: C,
-    client_config: RDKafkaClientConfig,
+    client_config: ClientConfig,
     namespace: Option<String>,
     err_handle: Arc<DynErrHandler>,
 }
 
 impl KafkaReportBuilder<mpsc::UnboundedSender<CollectItem>, mpsc::UnboundedReceiver<CollectItem>> {
-    /// Create builder, with rdkafka client configuration.
-    pub fn new(client_config: RDKafkaClientConfig) -> Self {
+    /// Create builder, with client configuration.
+    pub fn new(client_config: ClientConfig) -> Self {
         let (producer, consumer) = mpsc::unbounded_channel();
         Self::new_with_pc(client_config, producer, consumer)
     }
@@ -87,7 +173,7 @@ impl KafkaReportBuilder<mpsc::UnboundedSender<CollectItem>, mpsc::UnboundedReceiver<CollectItem>> {
 impl<P: CollectItemProduce, C: CollectItemConsume> KafkaReportBuilder<P, C> {
     /// Special purpose, used for user-defined produce and consume operations,
     /// usually you can use [KafkaReportBuilder::new].
-    pub fn new_with_pc(client_config: RDKafkaClientConfig, producer: P, consumer: C) -> Self {
+    pub fn new_with_pc(client_config: ClientConfig, producer: P, consumer: C) -> Self {
         Self {
             state: Default::default(),
             producer: Arc::new(producer),
@@ -118,7 +204,7 @@ impl<P: CollectItemProduce, C: CollectItemConsume> KafkaReportBuilder<P, C> {
     /// handle to push data to kafka in the background.
     pub async fn build(self) -> Result<(KafkaReporter<P>, KafkaReporting<C>), Error> {
         let kafka_producer = KafkaProducer::new(
-            self.client_config.create()?,
+            self.client_config.to_rdkafka_config().create()?,
             self.err_handle.clone(),
             self.namespace,
         )
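
Taken together, the change hides rdkafka behind crate-local `ClientConfig` and `LogLevel` wrappers: callers set plain string parameters and a log level, and the builder only converts to an rdkafka configuration (via the private `to_rdkafka_config`) when `build` runs. Below is a minimal usage sketch of the new surface; the `skywalking::reporter::kafka` import path, the tokio runtime, and the broker address are assumptions for illustration, not part of this diff:

```rust
// Hypothetical caller of the new API; the import path and broker
// address below are illustrative assumptions.
use skywalking::reporter::kafka::{ClientConfig, KafkaReportBuilder, LogLevel};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Plain string key/value parameters, as accepted by `ClientConfig::set`.
    let mut config = ClientConfig::new();
    config
        .set("bootstrap.servers", "127.0.0.1:9092") // illustrative broker
        .set("message.timeout.ms", "6000")
        .set_log_level(LogLevel::Debug);

    // `build` converts the wrapper into an rdkafka config internally,
    // creates the producer, and returns the reporter plus the reporting
    // half that pushes collected items to Kafka in the background.
    let (_reporter, _reporting) = KafkaReportBuilder::new(config).build().await?;
    Ok(())
}
```

Keeping `to_rdkafka_config` private means downstream code compiles only against the wrapper types, so the crate can change how it drives rdkafka (or swap the Kafka client entirely) without a breaking public API change.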