Skip to content

Commit 7c60469

Browse files
committed
Merge branch 'master' into 0.8
Conflicts: README.md kafka-src kafka/client.py kafka/consumer.py kafka/protocol.py setup.py test/integration.py
2 parents 53da81c + bff6cae commit 7c60469

File tree

5 files changed

+62
-53
lines changed

5 files changed

+62
-53
lines changed

README.md

Lines changed: 57 additions & 48 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,61 @@
1-
# Kakfa Python client
1+
# Kafka Python client
22

3-
This module provides low-level protocol support Apache Kafka. It implements the five basic request types
4-
(and their responses): Produce, Fetch, MultiFetch, MultiProduce, and Offsets. Gzip and Snappy compression
5-
is also supported.
3+
This module provides low-level protocol support for Apache Kafka as well as
4+
high-level consumer and producer classes. Request batching is supported by the
5+
protocol as well as broker-aware request routing. Gzip and Snappy compression
6+
is also supported for message sets.
67

7-
Compatible with Apache Kafka 0.7x. Tested against 0.8
8+
Compatible with Apache Kafka 0.8.0
89

9-
http://incubator.apache.org/kafka/
10+
http://kafka.apache.org/
1011

1112
# License
1213

1314
Copyright 2013, David Arthur under Apache License, v2.0. See `LICENSE`
1415

1516
# Status
1617

17-
Current version is 0.2-alpha. This version is under development, APIs are subject to change
18+
I'm following the version numbers of Kafka, plus one number to indicate the
19+
version of this project. The current version is 0.8.0-1. This version is under
20+
development, APIs are subject to change.
21+
22+
# Usage
23+
24+
## High level
25+
26+
```python
27+
from kafka.client import KafkaClient
28+
from kafka.consumer import SimpleConsumer
29+
from kafka.producer import SimpleProducer
30+
31+
kafka = KafkaClient("localhost", 9092)
32+
33+
producer = SimpleProducer(kafka, "my-topic")
34+
producer.send_messages("some message")
35+
producer.send_messages("this method", "is variadic")
36+
37+
consumer = SimpleConsumer(kafka, "my-group", "my-topic")
38+
for message in consumer:
39+
print(message)
40+
41+
kafka.close()
42+
```
43+
44+
## Low level
45+
46+
```python
47+
from kafka.client import KafkaClient
48+
kafka = KafkaClient("localhost", 9092)
49+
req = ProduceRequest(topic="my-topic", partition=1,
50+
messages=[KafkaProtocol.encode_message("some message")])
51+
resps = kafka.send_produce_request(payloads=[req], fail_on_error=True)
52+
kafka.close()
53+
54+
resps[0].topic # "my-topic"
55+
resps[0].partition # 1
56+
resps[0].error # 0 (hopefully)
57+
resps[0].offset # offset of the first message sent in this request
58+
```
1859

1960
# Install
2061

@@ -60,11 +101,14 @@ pip install python-snappy
60101

61102
# Tests
62103

63-
Some of the tests will fail if Snappy is not installed. These tests will throw NotImplementedError. If you see other failures,
64-
they might be bugs - so please report them!
104+
Some of the tests will fail if Snappy is not installed. These tests will throw
105+
NotImplementedError. If you see other failures, they might be bugs - so please
106+
report them!
65107

66108
## Run the unit tests
67109

110+
_These are broken at the moment_
111+
68112
```shell
69113
python -m test.unit
70114
```
@@ -81,46 +125,11 @@ cd kafka-src
81125
./sbt package
82126
```
83127

84-
Then from the root directory, run the integration tests
128+
Next start up a ZooKeeper server on localhost:2181
85129

86130
```shell
87-
python -m test.integration
88-
```
89-
90-
# Usage
91-
92-
## High level
93-
94-
```python
95-
from kafka.client import KafkaClient
96-
from kafka.consumer import SimpleConsumer
97-
from kafka.producer import SimpleProducer
98-
99-
kafka = KafkaClient("localhost", 9092)
100-
101-
producer = SimpleProducer(kafka, "my-topic")
102-
producer.send_messages("some message")
103-
producer.send_messages("this method", "is variadic")
104-
105-
consumer = SimpleConsumer(kafka, "my-group", "my-topic")
106-
for message in consumer:
107-
print(message)
108-
109-
kafka.close()
131+
/opt/zookeeper/bin/zkServer.sh start
110132
```
111133

112-
## Low level
113-
114-
```python
115-
from kafka.client import KafkaClient
116-
kafka = KafkaClient("localhost", 9092)
117-
req = ProduceRequest(topic="my-topic", partition=1,
118-
messages=[KafkaProtocol.encode_message("some message")])
119-
resps = kafka.send_produce_request(payloads=[req], fail_on_error=True)
120-
kafka.close()
121-
122-
resps[0].topic # "my-topic"
123-
resps[0].partition # 1
124-
resps[0].error # 0 (hopefully)
125-
resps[0].offset # offset of the first message sent in this request
126-
```
134+
This will actually start up real Kafka brokers and send messages in using the
135+
client.

kafka/client.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -233,7 +233,7 @@ def send_offset_commit_request(self, group, payloads=[], fail_on_error=True, cal
233233
def send_offset_fetch_request(self, group, payloads=[], fail_on_error=True, callback=None):
234234
raise NotImplementedError("Broker-managed offsets not supported in 0.8")
235235
resps = self._send_broker_aware_request(payloads,
236-
partial(KafkaProtocol.encode_offset_commit_fetch, group=group),
236+
partial(KafkaProtocol.encode_offset_fetch_request, group=group),
237237
KafkaProtocol.decode_offset_fetch_response)
238238
out = []
239239
for resp in resps:

kafka/protocol.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -354,7 +354,6 @@ def decode_offset_commit_response(cls, data):
354354
======
355355
data: bytes to decode
356356
"""
357-
data = data[2:] # TODO remove me when versionId is removed
358357
((correlation_id,), cur) = relative_unpack('>i', data, 0)
359358
(client_id, cur) = read_short_string(data, cur)
360359
((num_topics,), cur) = relative_unpack('>i', data, cur)
@@ -398,7 +397,6 @@ def decode_offset_fetch_response(cls, data):
398397
data: bytes to decode
399398
"""
400399

401-
data = data[2:] # TODO remove me when versionId is removed
402400
((correlation_id,), cur) = relative_unpack('>i', data, 0)
403401
(client_id, cur) = read_short_string(data, cur)
404402
((num_topics,), cur) = relative_unpack('>i', data, cur)

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
setup(
44
name="kafka-python",
5-
version="0.2-alpha",
5+
version="0.8.0-1",
66
author="David Arthur",
77
author_email="[email protected]",
88
url="https://github.com/mumrah/kafka-python",

test/integration.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
import shutil
77
import socket
88
import subprocess
9+
import sys
910
import tempfile
1011
from threading import Thread, Event
1112
import time
@@ -73,7 +74,8 @@ def run(self):
7374
args = shlex.split("java -cp %s org.apache.zookeeper.ZooKeeperMain create /%s kafka-python" % (cp, self.zk_chroot))
7475
proc = subprocess.Popen(args)
7576
ret = proc.wait()
76-
assert ret == 0
77+
if ret != 0:
78+
sys.exit(1)
7779

7880

7981
# Start Kafka

0 commit comments

Comments
 (0)