@@ -58,35 +58,29 @@ module Kafka.Producer
, module X
, runProducer
, newProducer
- , produceMessage, produceMessageBatch, produceMessageWithHeaders
- , produceMessage', produceMessageWithHeaders'
+ , produceMessage
+ , produceMessage'
, flushProducer
, closeProducer
, RdKafkaRespErrT (..)
)
where

- import Control.Arrow ((&&&))
import Control.Exception (bracket)
- import Control.Monad (forM, forM_, (<=<))
+ import Control.Monad (forM_)
import Control.Monad.IO.Class (MonadIO (liftIO))
import qualified Data.ByteString as BS
import qualified Data.ByteString.Internal as BSI
- import Data.Function (on)
- import Data.List (groupBy, sortBy)
- import Data.Ord (comparing)
import qualified Data.Text as Text
import Foreign.C.String (withCString)
- import Foreign.ForeignPtr (newForeignPtr_, withForeignPtr)
- import Foreign.Marshal.Array (withArrayLen)
+ import Foreign.ForeignPtr (withForeignPtr)
import Foreign.Marshal.Utils (withMany)
import Foreign.Ptr (Ptr, nullPtr, plusPtr)
- import Foreign.Storable (Storable (..))
import Foreign.StablePtr (newStablePtr, castStablePtrToPtr)
- import Kafka.Internal.RdKafka (RdKafkaMessageT (..), RdKafkaRespErrT (..), RdKafkaTypeT (..), RdKafkaVuT (..), destroyUnmanagedRdKafkaTopic, newRdKafkaT, newUnmanagedRdKafkaTopicT, rdKafkaErrorCode, rdKafkaErrorDestroy, rdKafkaOutqLen, rdKafkaProduceBatch, rdKafkaMessageProduceVa, rdKafkaSetLogLevel)
- import Kafka.Internal.Setup (Kafka (..), KafkaConf (..), KafkaProps (..), TopicConf (..), TopicProps (..), kafkaConf, topicConf, Callback (..))
+ import Kafka.Internal.RdKafka (RdKafkaRespErrT (..), RdKafkaTypeT (..), RdKafkaVuT (..), newRdKafkaT, rdKafkaErrorCode, rdKafkaErrorDestroy, rdKafkaOutqLen, rdKafkaMessageProduceVa, rdKafkaSetLogLevel)
+ import Kafka.Internal.Setup (Kafka (..), KafkaConf (..), KafkaProps (..), TopicProps (..), kafkaConf, topicConf, Callback (..))
import Kafka.Internal.Shared (pollEvents)
- import Kafka.Producer.Convert (copyMsgFlags, handleProduceErrT, producePartitionCInt, producePartitionInt)
+ import Kafka.Producer.Convert (copyMsgFlags, handleProduceErrT, producePartitionCInt)
import Kafka.Producer.Types (KafkaProducer (..))

import Kafka.Producer.ProducerProperties as X
@@ -95,7 +89,7 @@ import Kafka.Types as X

-- | Runs Kafka Producer.
-- The callback provided is expected to call 'produceMessage'
- -- or/and 'produceMessageBatch' to send messages to Kafka.
+ -- to send messages to Kafka.
{-# DEPRECATED runProducer "Use 'newProducer'/'closeProducer' instead" #-}
runProducer :: ProducerProperties
            -> (KafkaProducer -> IO (Either KafkaError a))
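
With 'runProducer' deprecated, the intended pattern is to pair 'newProducer' with 'closeProducer' explicitly and have application code call 'produceMessage' itself. Below is a minimal sketch of that lifecycle, assuming only functions already named in this module; the 'withProducer' and 'sendOne' helpers are illustrative, not part of the package API.

import Control.Exception (bracket)
import Kafka.Producer

-- Illustrative helper: acquire a producer, run an action against it, and
-- always close it, roughly what the deprecated runProducer did internally.
withProducer :: ProducerProperties -> (KafkaProducer -> IO a) -> IO (Either KafkaError a)
withProducer props act = bracket (newProducer props) release use
  where
    release (Left _)     = pure ()
    release (Right prod) = closeProducer prod
    use     (Left err)   = pure (Left err)
    use     (Right prod) = Right <$> act prod

-- The action handed to withProducer is where 'produceMessage' gets called,
-- as the updated runProducer documentation describes.
sendOne :: KafkaProducer -> ProducerRecord -> IO (Maybe KafkaError)
sendOne = produceMessage
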
@@ -146,42 +140,16 @@ produceMessage kp m = produceMessage' kp m (pure . mempty) >>= adjustRes
      Right () -> pure Nothing
      Left (ImmediateError err) -> pure (Just err)

- -- | Sends a single message with a registered callback and headers.
- produceMessageWithHeaders :: MonadIO m
-                           => KafkaProducer
-                           -> Headers
-                           -> ProducerRecord
-                           -> m (Maybe KafkaError)
- produceMessageWithHeaders kp headers msg = produceMessageWithHeaders' kp headers msg (pure . mempty) >>= adjustRes
-   where
-     adjustRes = \case
-       Right () -> pure Nothing
-       Left (ImmediateError err) -> pure (Just err)
-
-- | Sends a single message with a registered callback.
--
-- The callback can be a long running process, as it is forked by the thread
-- that handles the delivery reports.
- --
produceMessage' :: MonadIO m
                => KafkaProducer
                -> ProducerRecord
                -> (DeliveryReport -> IO ())
                -> m (Either ImmediateError ())
- produceMessage' kp = produceMessageWithHeaders' kp mempty
-
- -- | Sends a single message with a registered callback and headers.
- --
- -- The callback can be a long running process, as it is forked by the thread
- -- that handles the delivery reports.
- --
- produceMessageWithHeaders' :: MonadIO m
-                            => KafkaProducer
-                            -> Headers
-                            -> ProducerRecord
-                            -> (DeliveryReport -> IO ())
-                            -> m (Either ImmediateError ())
- produceMessageWithHeaders' kp@(KafkaProducer (Kafka k) _ _) headers msg cb = liftIO $
+ produceMessage' kp@(KafkaProducer (Kafka k) _ _) msg cb = liftIO $
  fireCallbacks >> produceIt
  where
    fireCallbacks =
@@ -190,7 +158,7 @@ produceMessageWithHeaders' kp@(KafkaProducer (Kafka k) _ _) headers msg cb = lif
    produceIt =
      withBS (prValue msg) $ \payloadPtr payloadLength ->
        withBS (prKey msg) $ \keyPtr keyLength ->
-         withHeaders headers $ \hdrs ->
+         withHeaders (prHeaders msg) $ \hdrs ->
            withCString (Text.unpack . unTopicName . prTopic $ msg) $ \topicName -> do
              callbackPtr <- newStablePtr cb
              let opts = [
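
With the 'produceMessageWithHeaders' variants removed, headers travel on the record itself: 'produceMessage'' now reads 'prHeaders msg' instead of taking a separate Headers argument. A hedged sketch of a caller after this change follows; the topic, payload, UnassignedPartition and the headersFromList builder are assumptions about the surrounding API, and since Headers is at least a Monoid (the removed 'produceMessage'' shim passed mempty), mempty is always a safe value for prHeaders.

{-# LANGUAGE OverloadedStrings #-}
import Kafka.Producer

-- Illustrative record: after this change headers are a field of
-- ProducerRecord rather than an extra argument to the produce call.
-- UnassignedPartition and headersFromList are assumed to be available from
-- the re-exported types modules; mempty is a safe fallback for prHeaders.
recordWithHeaders :: ProducerRecord
recordWithHeaders = ProducerRecord
  { prTopic     = TopicName "example-topic"
  , prPartition = UnassignedPartition
  , prKey       = Nothing
  , prValue     = Just "payload"
  , prHeaders   = headersFromList [("trace-id", "abc123")]
  }

-- produceMessage' registers a per-message callback; it receives the
-- DeliveryReport from the thread that handles delivery reports.
sendWithReport :: KafkaProducer -> IO (Either ImmediateError ())
sendWithReport prod = produceMessage' prod recordWithHeaders handleReport
  where
    handleReport _report = pure ()  -- inspect the DeliveryReport here
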
@@ -208,59 +176,6 @@ produceMessageWithHeaders' kp@(KafkaProducer (Kafka k) _ _) headers msg cb = lif
            Just err -> Left . ImmediateError $ err
            Nothing -> Right ()

- -- | Sends a batch of messages.
- -- Returns a list of messages which it was unable to send with corresponding errors.
- -- Since librdkafka is backed by a queue, this function can return before messages are sent. See
- -- 'flushProducer' to wait for queue to empty.
- produceMessageBatch :: MonadIO m
-                     => KafkaProducer
-                     -> [ProducerRecord]
-                     -> m [(ProducerRecord, KafkaError)]
-                     -- ^ An empty list when the operation is successful,
-                     -- otherwise a list of "failed" messages with corresponsing errors.
- produceMessageBatch kp@(KafkaProducer (Kafka k) _ (TopicConf tc)) messages = liftIO $ do
-   pollEvents kp (Just $ Timeout 0) -- fire callbacks if any exist (handle delivery reports)
-   concat <$> forM (mkBatches messages) sendBatch
-   where
-     mkSortKey = prTopic &&& prPartition
-     mkBatches = groupBy ((==) `on` mkSortKey) . sortBy (comparing mkSortKey)
-
-     mkTopic (TopicName tn) = newUnmanagedRdKafkaTopicT k (Text.unpack tn) (Just tc)
-
-     clTopic = either (return . const ()) destroyUnmanagedRdKafkaTopic
-
-     sendBatch [] = return []
-     sendBatch batch = bracket (mkTopic $ prTopic (head batch)) clTopic (withTopic batch)
-
-     withTopic ms (Left err) = return $ (, KafkaError (Text.pack err)) <$> ms
-     withTopic ms (Right t) = do
-       let (partInt, partCInt) = (producePartitionInt &&& producePartitionCInt) $ prPartition (head ms)
-       withForeignPtr t $ \topicPtr -> do
-         nativeMs <- forM ms (toNativeMessage topicPtr partInt)
-         withArrayLen nativeMs $ \len batchPtr -> do
-           batchPtrF <- newForeignPtr_ batchPtr
-           numRet <- rdKafkaProduceBatch t partCInt copyMsgFlags batchPtrF len
-           if numRet == len then return []
-           else do
-             errs <- mapM (return . err'RdKafkaMessageT <=< peekElemOff batchPtr)
-                          [0 .. (fromIntegral $ len - 1)]
-             return [(m, KafkaResponseError e) | (m, e) <- zip messages errs, e /= RdKafkaRespErrNoError]
-
-     toNativeMessage t p m =
-       withBS (prValue m) $ \payloadPtr payloadLength ->
-         withBS (prKey m) $ \keyPtr keyLength ->
-           return RdKafkaMessageT
-             { err'RdKafkaMessageT       = RdKafkaRespErrNoError
-             , topic'RdKafkaMessageT     = t
-             , partition'RdKafkaMessageT = p
-             , len'RdKafkaMessageT       = payloadLength
-             , payload'RdKafkaMessageT   = payloadPtr
-             , offset'RdKafkaMessageT    = 0
-             , keyLen'RdKafkaMessageT    = keyLength
-             , key'RdKafkaMessageT       = keyPtr
-             , opaque'RdKafkaMessageT    = nullPtr
-             }
-
-- | Closes the producer.
-- Will wait until the outbound queue is drained before returning the control.
closeProducer :: MonadIO m => KafkaProducer -> m ()
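
'produceMessageBatch' is removed entirely. Since librdkafka queues messages internally anyway, a caller can approximate it by enqueuing records one at a time and then flushing; only immediate enqueue errors are collected, while delivery outcomes still arrive through the delivery-report callbacks. A hedged replacement sketch follows; 'produceAll' is an illustrative helper, not part of this package.

import Data.Maybe (catMaybes)
import Kafka.Producer

-- Illustrative stand-in for the removed produceMessageBatch: enqueue each
-- record individually, then wait for the outbound queue to drain.
produceAll :: KafkaProducer -> [ProducerRecord] -> IO [(ProducerRecord, KafkaError)]
produceAll prod records = do
  results <- mapM enqueue records
  flushProducer prod               -- block until librdkafka's queue is drained
  pure (catMaybes results)
  where
    enqueue r = fmap (\err -> (r, err)) <$> produceMessage prod r

Unlike the removed function, this sketch flushes before returning, so it trades the batch API's fire-and-forget behaviour for a simpler error story.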