
Commit 5e61094

Merge pull request #179 from mlevashov/record_headers

Add headers to consumed/produced records

2 parents: 1bfc027 + 72e6f6d

16 files changed: +281 -145 lines

example/ProducerExample.hs (3 additions, 6 deletions)

@@ -31,6 +31,7 @@ mkMessage k v = ProducerRecord
   , prPartition = UnassignedPartition
   , prKey = k
   , prValue = v
+  , prHeaders = mempty
   }

 -- Run an example

@@ -61,12 +62,8 @@ sendMessages prod = do
   putStrLn "And the last one..."
   msg3 <- getLine
   err3 <- produceMessage prod (mkMessage (Just "key3") (Just $ pack msg3))
-
-  -- errs <- produceMessageBatch prod
-  --          [ mkMessage (Just "b-1") (Just "batch-1")
-  --          , mkMessage (Just "b-2") (Just "batch-2")
-  --          , mkMessage Nothing (Just "batch-3")
-  --          ]
+
+  err4 <- produceMessage prod ((mkMessage (Just "key4") (Just $ pack msg3)) { prHeaders = headersFromList [("fancy", "header")]})

   -- forM_ errs (print . snd)

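For context, the new prHeaders field slots into the ordinary ProducerRecord construction. A minimal sketch (topic, key, value, and header contents are invented for illustration, and it assumes headersFromList is reachable through the usual Kafka.Producer import):

{-# LANGUAGE OverloadedStrings #-}

import Kafka.Producer

-- A record carrying two headers; apart from prHeaders this is the same
-- ProducerRecord construction as mkMessage in the example above.
recordWithHeaders :: ProducerRecord
recordWithHeaders = ProducerRecord
  { prTopic     = TopicName "example-topic"
  , prPartition = UnassignedPartition
  , prKey       = Just "key1"
  , prValue     = Just "value1"
  , prHeaders   = headersFromList [("trace-id", "abc123"), ("source", "example")]
  }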
nix/sources.json (7 additions, 7 deletions)

@@ -5,22 +5,22 @@
         "homepage": "https://github.com/nmattia/niv",
         "owner": "nmattia",
         "repo": "niv",
-        "rev": "af958e8057f345ee1aca714c1247ef3ba1c15f5e",
-        "sha256": "1qjavxabbrsh73yck5dcq8jggvh3r2jkbr6b5nlz5d9yrqm9255n",
+        "rev": "65a61b147f307d24bfd0a5cd56ce7d7b7cc61d2e",
+        "sha256": "17mirpsx5wyw262fpsd6n6m47jcgw8k2bwcp1iwdnrlzy4dhcgqh",
         "type": "tarball",
-        "url": "https://github.com/nmattia/niv/archive/af958e8057f345ee1aca714c1247ef3ba1c15f5e.tar.gz",
+        "url": "https://github.com/nmattia/niv/archive/65a61b147f307d24bfd0a5cd56ce7d7b7cc61d2e.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     },
     "nixpkgs": {
-        "branch": "release-19.03",
+        "branch": "nixos-21.05",
         "description": "Nix Packages collection",
         "homepage": "",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "da0c385a691d38b56b17eb18b852c4cec2050c24",
-        "sha256": "0svhqn139cy2nlgv4kqv1bsxza2dcm0yylrhnmanw4p73gv85caf",
+        "rev": "ce7a1190a0fa4ba3465b5f5471b08567060ca14c",
+        "sha256": "1zr1s9gp0h5g4arlba1bpb9yqfaaby5195ydm6a2psaxhm748li9",
         "type": "tarball",
-        "url": "https://github.com/NixOS/nixpkgs/archive/da0c385a691d38b56b17eb18b852c4cec2050c24.tar.gz",
+        "url": "https://github.com/NixOS/nixpkgs/archive/ce7a1190a0fa4ba3465b5f5471b08567060ca14c.tar.gz",
         "url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz"
     }
 }

scripts/build-librdkafka (1 addition, 1 deletion)

@@ -1,6 +1,6 @@
 #!/bin/bash

-RDKAFKA_VER="849c066b559950b02e37a69256f0cb7b04381d0e"
+RDKAFKA_VER="1a722553638bba85dbda5050455f7b9a5ef302de"

 PRJ=$PWD
 DST="$PRJ/.librdkafka"

shell.nix (1 addition, 0 deletions)

@@ -7,6 +7,7 @@ pkgs.mkShell {
     rdkafka
     nettools
     niv
+    gmp
   ];

   shellHook = ''

src/Kafka/Consumer/Convert.hs (6 additions, 3 deletions)

@@ -18,6 +18,7 @@ where

 import Control.Monad ((>=>))
 import qualified Data.ByteString as BS
+import Data.Either (fromRight)
 import Data.Int (Int64)
 import Data.Map.Strict (Map, fromListWith)
 import qualified Data.Set as S

@@ -41,7 +42,7 @@ import Kafka.Internal.RdKafka
   , rdKafkaTopicPartitionListNew
   , peekCText
   )
-import Kafka.Internal.Shared (kafkaRespErr, readTopic, readKey, readPayload, readTimestamp)
+import Kafka.Internal.Shared (kafkaRespErr, readHeaders, readTopic, readKey, readPayload, readTimestamp)
 import Kafka.Types (KafkaError(..), PartitionId(..), TopicName(..))

 -- | Converts offsets sync policy to integer (the way Kafka understands it):

@@ -158,20 +159,22 @@ fromMessagePtr ptr =
     s <- peek realPtr
     msg <- if err'RdKafkaMessageT s /= RdKafkaRespErrNoError
              then return . Left . KafkaResponseError $ err'RdKafkaMessageT s
-             else Right <$> mkRecord s
+             else Right <$> mkRecord s realPtr
     rdKafkaMessageDestroy realPtr
     return msg
   where
-    mkRecord msg = do
+    mkRecord msg rptr = do
       topic     <- readTopic msg
       key       <- readKey msg
       payload   <- readPayload msg
       timestamp <- readTimestamp ptr
+      headers   <- fromRight mempty <$> readHeaders rptr
      return ConsumerRecord
        { crTopic     = TopicName topic
        , crPartition = PartitionId $ partition'RdKafkaMessageT msg
        , crOffset    = Offset $ offset'RdKafkaMessageT msg
        , crTimestamp = timestamp
+       , crHeaders   = headers
        , crKey       = key
        , crValue     = payload
        }

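On the consuming side, every record returned by the high-level consumer now carries the headers read above (or mempty when extraction fails). A rough usage sketch, assuming an already-configured and subscribed KafkaConsumer and the usual Kafka.Consumer re-exports; the timeout value is arbitrary:

import Kafka.Consumer

-- Poll one message and show its headers; Headers can be printed because
-- ConsumerRecord derives Show.
printHeadersOnce :: KafkaConsumer -> IO ()
printHeadersOnce consumer = do
  msg <- pollMessage consumer (Timeout 1000)
  case msg of
    Left err  -> print err              -- poll error or timeout
    Right rec -> print (crHeaders rec)  -- the field added in this PR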
src/Kafka/Consumer/Types.hs (3 additions, 2 deletions)

@@ -43,7 +43,7 @@ import Data.Text (Text)
 import Data.Typeable (Typeable)
 import GHC.Generics (Generic)
 import Kafka.Internal.Setup (HasKafka (..), HasKafkaConf (..), Kafka (..), KafkaConf (..))
-import Kafka.Types (Millis (..), PartitionId (..), TopicName (..))
+import Kafka.Types (Millis (..), PartitionId (..), TopicName (..), Headers)

 -- | The main type for Kafka consumption, used e.g. to poll and commit messages.
 --

@@ -143,13 +143,14 @@ data ConsumerRecord k v = ConsumerRecord
   , crPartition :: !PartitionId -- ^ Kafka partition this message was received from
   , crOffset    :: !Offset      -- ^ Offset within the 'crPartition' Kafka partition
   , crTimestamp :: !Timestamp   -- ^ Message timestamp
+  , crHeaders   :: !Headers     -- ^ Message headers
   , crKey       :: !k           -- ^ Message key
   , crValue     :: !v           -- ^ Message value
   }
   deriving (Eq, Show, Read, Typeable, Generic)

 instance Bifunctor ConsumerRecord where
-  bimap f g (ConsumerRecord t p o ts k v) = ConsumerRecord t p o ts (f k) (g v)
+  bimap f g (ConsumerRecord t p o ts hds k v) = ConsumerRecord t p o ts hds (f k) (g v)
   {-# INLINE bimap #-}

 instance Functor (ConsumerRecord k) where

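Because crHeaders sits between crTimestamp and crKey, the Bifunctor and Functor instances transform only the key and value and pass the headers through untouched. A small sketch of that behaviour (the UTF-8 decoding is just an example of a key/value transformation):

import           Data.Bifunctor (bimap)
import qualified Data.ByteString as BS
import           Data.Text (Text)
import           Data.Text.Encoding (decodeUtf8)
import           Kafka.Consumer.Types (ConsumerRecord (..))

-- Decode key and value to Text; crHeaders, crOffset, crTimestamp, etc.
-- are carried over unchanged.
decodeRecord :: ConsumerRecord (Maybe BS.ByteString) (Maybe BS.ByteString)
             -> ConsumerRecord (Maybe Text) (Maybe Text)
decodeRecord = bimap (fmap decodeUtf8) (fmap decodeUtf8)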
src/Kafka/Internal/RdKafka.chs (115 additions, 3 deletions)

@@ -13,13 +13,13 @@ import Data.Word (Word8)
 import Foreign.Concurrent (newForeignPtr)
 import qualified Foreign.Concurrent as Concurrent
 import Foreign.Marshal.Alloc (alloca, allocaBytes)
-import Foreign.Marshal.Array (peekArray, allocaArray)
+import Foreign.Marshal.Array (peekArray, allocaArray, withArrayLen)
 import Foreign.Storable (Storable(..))
 import Foreign.Ptr (Ptr, FunPtr, castPtr, nullPtr)
 import Foreign.ForeignPtr (FinalizerPtr, addForeignPtrFinalizer, newForeignPtr_, withForeignPtr)
 import Foreign.C.Error (Errno(..), getErrno)
 import Foreign.C.String (CString, newCString, withCAString, peekCAString, peekCString)
-import Foreign.C.Types (CFile, CInt(..), CSize, CChar)
+import Foreign.C.Types (CFile, CInt(..), CSize, CChar, CLong)
 import System.IO (Handle, stdin, stdout, stderr)
 import System.Posix.IO (handleToFd)
 import System.Posix.Types (Fd(..))

@@ -972,6 +972,118 @@ newRdKafkaTopicT kafkaPtr topic topicConfPtr = do
     _ <- traverse (addForeignPtrFinalizer rdKafkaTopicDestroy') res
     return res

+-------------------------------------------------------------------------------------------------
+---- Errors
+
+data RdKafkaErrorT
+{#pointer *rd_kafka_error_t as RdKafkaErrorTPtr -> RdKafkaErrorT #}
+
+{#fun rd_kafka_error_code as ^
+    {`RdKafkaErrorTPtr'} -> `RdKafkaRespErrT' cIntToEnum #}
+
+{#fun rd_kafka_error_destroy as ^
+    {`RdKafkaErrorTPtr'} -> `()' #}
+-------------------------------------------------------------------------------------------------
+---- Headers
+
+data RdKafkaHeadersT
+{#pointer *rd_kafka_headers_t as RdKafkaHeadersTPtr -> RdKafkaHeadersT #}
+
+{#fun rd_kafka_header_get_all as ^
+    {`RdKafkaHeadersTPtr', cIntConv `CSize', castPtr `Ptr CString', castPtr `Ptr Word8Ptr', `CSizePtr'} -> `RdKafkaRespErrT' cIntToEnum #}
+
+{#fun rd_kafka_message_headers as ^
+    {castPtr `Ptr RdKafkaMessageT', alloca- `RdKafkaHeadersTPtr' peekPtr*} -> `RdKafkaRespErrT' cIntToEnum #}
+
+--- Produceva api
+
+{#enum rd_kafka_vtype_t as ^ {underscoreToCase} deriving (Show, Eq) #}
+
+data RdKafkaVuT
+    = Topic'RdKafkaVu CString
+    | TopicHandle'RdKafkaVu (Ptr RdKafkaTopicT)
+    | Partition'RdKafkaVu CInt32T
+    | Value'RdKafkaVu Word8Ptr CSize
+    | Key'RdKafkaVu Word8Ptr CSize
+    | MsgFlags'RdKafkaVu CInt
+    | Timestamp'RdKafkaVu CInt64T
+    | Opaque'RdKafkaVu (Ptr ())
+    | Header'RdKafkaVu CString Word8Ptr CSize
+    | Headers'RdKafkaVu (Ptr RdKafkaHeadersT) -- The message object will assume ownership of the headers (unless produceva() fails)
+    | End'RdKafkaVu
+
+{#pointer *rd_kafka_vu_t as RdKafkaVuTPtr foreign -> RdKafkaVuT #}
+
+instance Storable RdKafkaVuT where
+    alignment _ = {#alignof rd_kafka_vu_t #}
+    sizeOf _ = {#sizeof rd_kafka_vu_t #}
+    peek p = {#get rd_kafka_vu_t->vtype #} p >>= \a -> case cIntToEnum a of
+        RdKafkaVtypeEnd -> return End'RdKafkaVu
+        RdKafkaVtypeTopic -> Topic'RdKafkaVu <$> ({#get rd_kafka_vu_t->u.cstr #} p)
+        RdKafkaVtypeMsgflags -> MsgFlags'RdKafkaVu <$> ({#get rd_kafka_vu_t->u.i #} p)
+        RdKafkaVtypeTimestamp -> Timestamp'RdKafkaVu <$> ({#get rd_kafka_vu_t->u.i64 #} p)
+        RdKafkaVtypePartition -> Partition'RdKafkaVu <$> ({#get rd_kafka_vu_t->u.i32 #} p)
+        RdKafkaVtypeHeaders -> Headers'RdKafkaVu <$> ({#get rd_kafka_vu_t->u.headers #} p)
+        RdKafkaVtypeValue -> do
+            nm <- liftM castPtr ({#get rd_kafka_vu_t->u.mem.ptr #} p)
+            sz <- ({#get rd_kafka_vu_t->u.mem.size #} p)
+            return $ Value'RdKafkaVu nm (cIntConv sz)
+        RdKafkaVtypeKey -> do
+            nm <- liftM castPtr ({#get rd_kafka_vu_t->u.mem.ptr #} p)
+            sz <- ({#get rd_kafka_vu_t->u.mem.size #} p)
+            return $ Key'RdKafkaVu nm (cIntConv sz)
+        RdKafkaVtypeRkt -> TopicHandle'RdKafkaVu <$> ({#get rd_kafka_vu_t->u.rkt #} p)
+        RdKafkaVtypeOpaque -> Opaque'RdKafkaVu <$> ({#get rd_kafka_vu_t->u.ptr #} p)
+        RdKafkaVtypeHeader -> do
+            nm <- ({#get rd_kafka_vu_t->u.header.name #} p)
+            val' <- liftM castPtr ({#get rd_kafka_vu_t->u.header.val #} p)
+            sz <- ({#get rd_kafka_vu_t->u.header.size #} p)
+            return $ Header'RdKafkaVu nm val' (cIntConv sz)
+    poke p End'RdKafkaVu =
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeEnd)
+    poke p (Topic'RdKafkaVu str) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeTopic)
+        {#set rd_kafka_vu_t.u.cstr #} p str
+    poke p (Timestamp'RdKafkaVu tms) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeTimestamp)
+        {#set rd_kafka_vu_t.u.i64 #} p tms
+    poke p (Partition'RdKafkaVu prt) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypePartition)
+        {#set rd_kafka_vu_t.u.i32 #} p prt
+    poke p (MsgFlags'RdKafkaVu flags) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeMsgflags)
+        {#set rd_kafka_vu_t.u.i #} p flags
+    poke p (Headers'RdKafkaVu headers) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeHeaders)
+        {#set rd_kafka_vu_t.u.headers #} p headers
+    poke p (TopicHandle'RdKafkaVu tphandle) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeRkt)
+        {#set rd_kafka_vu_t.u.rkt #} p tphandle
+    poke p (Value'RdKafkaVu pl sz) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeValue)
+        {#set rd_kafka_vu_t.u.mem.size #} p (cIntConv sz)
+        {#set rd_kafka_vu_t.u.mem.ptr #} p (castPtr pl)
+    poke p (Key'RdKafkaVu pl sz) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeKey)
+        {#set rd_kafka_vu_t.u.mem.size #} p (cIntConv sz)
+        {#set rd_kafka_vu_t.u.mem.ptr #} p (castPtr pl)
+    poke p (Opaque'RdKafkaVu ptr') = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeOpaque)
+        {#set rd_kafka_vu_t.u.ptr #} p ptr'
+    poke p (Header'RdKafkaVu nm val' sz) = do
+        {#set rd_kafka_vu_t.vtype #} p (enumToCInt RdKafkaVtypeHeader)
+        {#set rd_kafka_vu_t.u.header.size #} p (cIntConv sz)
+        {#set rd_kafka_vu_t.u.header.name #} p nm
+        {#set rd_kafka_vu_t.u.header.val #} p (castPtr val')
+
+{#fun rd_kafka_produceva as rdKafkaMessageProduceVa'
+    {`RdKafkaTPtr', `RdKafkaVuTPtr', `CLong'} -> `RdKafkaErrorTPtr' #}
+
+rdKafkaMessageProduceVa :: RdKafkaTPtr -> [RdKafkaVuT] -> IO RdKafkaErrorTPtr
+rdKafkaMessageProduceVa kafkaPtr vts = withArrayLen vts $ \i arrPtr -> do
+    fptr <- newForeignPtr_ arrPtr
+    rdKafkaMessageProduceVa' kafkaPtr fptr (cIntConv i)
+
 -- Marshall / Unmarshall
 enumToCInt :: Enum a => a -> CInt
 enumToCInt = fromIntegral . fromEnum

@@ -1013,4 +1125,4 @@ c_stdin = handleToCFile stdin "r"
 c_stdout :: IO CFilePtr
 c_stdout = handleToCFile stdout "w"
 c_stderr :: IO CFilePtr
-c_stderr = handleToCFile stderr "w"
+c_stderr = handleToCFile stderr "w"

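At the binding level, rdKafkaMessageProduceVa marshals a list of RdKafkaVuT values into a temporary C array and hands it to rd_kafka_produceva together with the list length. A rough, low-level sketch of driving it directly, assuming these names are exported from Kafka.Internal.RdKafka; the topic and header are invented, and a real payload would also pass Value'RdKafkaVu/Key'RdKafkaVu buffers plus message flags:

import Foreign.C.String (withCAString)
import Foreign.Ptr (nullPtr)
import Kafka.Internal.RdKafka

-- Produce an empty message with a single header on an unassigned partition,
-- then inspect the returned error object (rd_kafka_error_code treats a null
-- error pointer as "no error"); real code would also free a non-null error
-- with rdKafkaErrorDestroy.
producevaSketch :: RdKafkaTPtr -> IO RdKafkaRespErrT
producevaSketch kafka =
  withCAString "test-topic" $ \topicName ->
    withCAString "x-example" $ \headerName -> do
      errPtr <- rdKafkaMessageProduceVa kafka
        [ Topic'RdKafkaVu topicName
        , Partition'RdKafkaVu (-1)               -- unassigned partition
        , Header'RdKafkaVu headerName nullPtr 0  -- header name with an empty value
        ]
      rdKafkaErrorCode errPtr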
src/Kafka/Internal/Shared.hs (29 additions, 2 deletions)

@@ -1,3 +1,5 @@
+{-# LANGUAGE LambdaCase #-}
+
 module Kafka.Internal.Shared
   ( pollEvents
   , word8PtrToBS

@@ -8,6 +10,7 @@ module Kafka.Internal.Shared
   , kafkaErrorToEither
   , kafkaErrorToMaybe
   , maybeToLeft
+  , readHeaders
   , readPayload
   , readTopic
   , readKey

@@ -29,9 +32,9 @@ import Foreign.Marshal.Alloc (alloca)
 import Foreign.Ptr (Ptr, nullPtr)
 import Foreign.Storable (Storable (peek))
 import Kafka.Consumer.Types (Timestamp (..))
-import Kafka.Internal.RdKafka (RdKafkaMessageT (..), RdKafkaMessageTPtr, RdKafkaRespErrT (..), RdKafkaTimestampTypeT (..), Word8Ptr, rdKafkaErrno2err, rdKafkaMessageTimestamp, rdKafkaPoll, rdKafkaTopicName)
+import Kafka.Internal.RdKafka (RdKafkaMessageT (..), RdKafkaMessageTPtr, RdKafkaRespErrT (..), RdKafkaTimestampTypeT (..), Word8Ptr, rdKafkaErrno2err, rdKafkaMessageTimestamp, rdKafkaPoll, rdKafkaTopicName, rdKafkaHeaderGetAll, rdKafkaMessageHeaders)
 import Kafka.Internal.Setup (HasKafka (..), Kafka (..))
-import Kafka.Types (KafkaError (..), Millis (..), Timeout (..))
+import Kafka.Types (KafkaError (..), Millis (..), Timeout (..), Headers, headersFromList)

 pollEvents :: HasKafka a => a -> Maybe Timeout -> IO ()
 pollEvents a tm =

@@ -102,6 +105,30 @@ readTimestamp msg =
     RdKafkaTimestampLogAppendTime -> LogAppendTime (Millis ts)
     RdKafkaTimestampNotAvailable  -> NoTimestamp

+
+readHeaders :: Ptr RdKafkaMessageT -> IO (Either RdKafkaRespErrT Headers)
+readHeaders msg = do
+    (err, headersPtr) <- rdKafkaMessageHeaders msg
+    case err of
+        RdKafkaRespErrNoent -> return $ Right mempty
+        RdKafkaRespErrNoError -> fmap headersFromList <$> extractHeaders headersPtr
+        e -> return . Left $ e
+    where extractHeaders ptHeaders =
+            alloca $ \nptr ->
+              alloca $ \vptr ->
+                alloca $ \szptr ->
+                  let go acc idx = rdKafkaHeaderGetAll ptHeaders idx nptr vptr szptr >>= \case
+                        RdKafkaRespErrNoent -> return $ Right acc
+                        RdKafkaRespErrNoError -> do
+                          cstr <- peek nptr
+                          wptr <- peek vptr
+                          csize <- peek szptr
+                          hn <- BS.packCString cstr
+                          hv <- word8PtrToBS (fromIntegral csize) wptr
+                          go ((hn, hv) : acc) (idx + 1)
+                        _ -> error "Unexpected error code while extracting headers"
+                  in go [] 0
+
 readBS :: (t -> Int) -> (t -> Ptr Word8) -> t -> IO (Maybe BS.ByteString)
 readBS flen fdata s = if fdata s == nullPtr
                         then return Nothing

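readHeaders treats the "no headers" case (ERR__NOENT) as an empty Headers value and only reports genuinely unexpected codes as Left; fromMessagePtr in Kafka.Consumer.Convert then collapses even those to mempty. A minimal sketch of that calling convention:

import Data.Either (fromRight)
import Foreign.Ptr (Ptr)
import Kafka.Internal.RdKafka (RdKafkaMessageT)
import Kafka.Internal.Shared (readHeaders)
import Kafka.Types (Headers)

-- Treat any header-read failure as "no headers", mirroring fromMessagePtr.
headersOrEmpty :: Ptr RdKafkaMessageT -> IO Headers
headersOrEmpty msgPtr = fromRight mempty <$> readHeaders msgPtr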