Commit c0d2cac

Fix #44 Add missing exception class
Also move the exceptions to common instead of util
1 parent 9af7b81 commit c0d2cac

6 files changed: +25 -17 lines changed

CHANGES.md

Lines changed: 2 additions & 0 deletions

@@ -5,3 +5,5 @@
 * Adding fetch_size_bytes to SimpleConsumer constructor to allow for user-configurable fetch sizes
 
 * Allow SimpleConsumer to automatically increase the fetch size if a partial message is read and no other messages were read during that fetch request. The increase factor is 1.5
+
+* Exception classes moved to kafka.common
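
For downstream code, this changelog entry means the exception classes are now imported from kafka.common rather than kafka.util. A minimal before/after sketch (the grouped-import layout is illustrative; ConsumerNoMoreData is the class this commit adds):

    # before this commit
    # from kafka.util import BufferUnderflowError, ChecksumError, ConsumerFetchSizeTooSmall

    # after this commit
    from kafka.common import (
        BufferUnderflowError, ChecksumError,
        ConsumerFetchSizeTooSmall, ConsumerNoMoreData
    )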

kafka/common.py

Lines changed: 16 additions & 0 deletions

@@ -64,3 +64,19 @@ class ErrorMapping(object):
     MESSAGE_SIZE_TO_LARGE = 10
     STALE_CONTROLLER_EPOCH = 11
     OFFSET_METADATA_TOO_LARGE = 12
+
+#################
+# Exceptions #
+#################
+
+class BufferUnderflowError(Exception):
+    pass
+
+class ChecksumError(Exception):
+    pass
+
+class ConsumerFetchSizeTooSmall(Exception):
+    pass
+
+class ConsumerNoMoreData(Exception):
+    pass
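
With the exceptions living alongside ErrorMapping in kafka.common, callers catch them from there. A minimal usage sketch, assuming a hypothetical fetch helper (fetch_messages and the retry loop are illustrative, not part of this commit; the 1.5 growth factor mirrors the CHANGES.md note):

    from kafka.common import ConsumerFetchSizeTooSmall

    def fetch_with_growing_buffer(consumer, fetch_size):
        # Hypothetical helper: retry with a 1.5x larger fetch size whenever the
        # buffer was too small to hold a complete message.
        while True:
            try:
                # illustrative call, not the real consumer API
                return consumer.fetch_messages(fetch_size)
            except ConsumerFetchSizeTooSmall:
                fetch_size = int(fetch_size * 1.5)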

kafka/consumer.py

Lines changed: 3 additions & 4 deletions

@@ -8,12 +8,11 @@
 
 from kafka.common import (
     ErrorMapping, FetchRequest,
-    OffsetRequest, OffsetFetchRequest, OffsetCommitRequest
+    OffsetRequest, OffsetFetchRequest, OffsetCommitRequest,
+    ConsumerFetchSizeTooSmall, ConsumerNoMoreData
 )
 
-from kafka.util import (
-    ReentrantTimer, ConsumerFetchSizeTooSmall
-)
+from kafka.util import ReentrantTimer
 
 log = logging.getLogger("kafka")
 
kafka/protocol.py

Lines changed: 3 additions & 3 deletions

@@ -8,12 +8,12 @@
 from kafka.common import (
     BrokerMetadata, PartitionMetadata, Message, OffsetAndMessage,
     ProduceResponse, FetchResponse, OffsetResponse,
-    OffsetCommitResponse, OffsetFetchResponse
+    OffsetCommitResponse, OffsetFetchResponse,
+    BufferUnderflowError, ChecksumError, ConsumerFetchSizeTooSmall
 )
 from kafka.util import (
     read_short_string, read_int_string, relative_unpack,
-    write_short_string, write_int_string, group_by_topic_and_partition,
-    BufferUnderflowError, ChecksumError, ConsumerFetchSizeTooSmall
+    write_short_string, write_int_string, group_by_topic_and_partition
 )
 
 log = logging.getLogger("kafka")

kafka/queue.py

Lines changed: 1 addition & 1 deletion

@@ -4,7 +4,7 @@
 from Queue import Empty
 import time
 
-from .client import KafkaClient, FetchRequest, ProduceRequest
+from kafka.client import KafkaClient, FetchRequest, ProduceRequest
 
 log = logging.getLogger("kafka")
 
kafka/util.py

Lines changed: 0 additions & 9 deletions

@@ -66,15 +66,6 @@ def group_by_topic_and_partition(tuples):
     return out
 
 
-class BufferUnderflowError(Exception):
-    pass
-
-
-class ChecksumError(Exception):
-    pass
-
-class ConsumerFetchSizeTooSmall(Exception):
-    pass
 
 class ReentrantTimer(object):
     """