Skip to content

Commit 077dc47

Browse files
committed
Merge pull request #515 from dpkp/kafka_producer
KafkaProducer
2 parents 48e9682 + 85c0dd2 commit 077dc47

17 files changed

+2163
-313
lines changed

README.rst

Lines changed: 28 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,34 @@ for examples.
5050
KafkaProducer
5151
*************
5252

53-
<`in progress - see SimpleProducer for legacy producer implementation`>
53+
KafkaProducer is a high-level, asynchronous message producer. The class is
54+
intended to operate as similarly as possible to the official java client.
55+
See `ReadTheDocs <http://kafka-python.readthedocs.org/en/master/apidoc/KafkaProducer.html>`_
56+
for more details.
57+
58+
>>> from kafka import KafkaProducer
59+
>>> producer = KafkaProducer(bootstrap_servers='localhost:1234')
60+
>>> producer.send('foobar', b'some_message_bytes')
61+
62+
>>> # Blocking send
63+
>>> producer.send('foobar', b'another_message').get(timeout=60)
64+
65+
>>> # Use a key for hashed-partitioning
66+
>>> producer.send('foobar', key=b'foo', value=b'bar')
67+
68+
>>> # Serialize json messages
69+
>>> import json
70+
>>> producer = KafkaProducer(value_serializer=lambda v: json.dumps(v).encode('utf-8'))
71+
>>> producer.send('fizzbuzz', {'foo': 'bar'})
72+
73+
>>> # Serialize string keys
74+
>>> producer = KafkaProducer(key_serializer=str.encode)
75+
>>> producer.send('flipflap', key='ping', value=b'1234')
76+
77+
>>> # Compress messages
78+
>>> producer = KafkaProducer(compression_type='gzip')
79+
>>> for i in range(1000):
80+
... producer.send('foobar', b'msg %d' % i)
5481

5582

5683
Protocol

docs/apidoc/KafkaProducer.rst

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
KafkaProducer
22
=============
33

4-
<unreleased> See :class:`kafka.producer.SimpleProducer`
4+
.. autoclass:: kafka.KafkaProducer
5+
:members:

kafka/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
__copyright__ = 'Copyright 2016 Dana Powers, David Arthur, and Contributors'
66

77
from kafka.consumer import KafkaConsumer
8+
from kafka.producer import KafkaProducer
89
from kafka.conn import BrokerConnection
910
from kafka.protocol import (
1011
create_message, create_gzip_message, create_snappy_message)
@@ -28,7 +29,7 @@ def __init__(self, *args, **kwargs):
2829

2930

3031
__all__ = [
31-
'KafkaConsumer', 'KafkaClient', 'BrokerConnection',
32+
'KafkaConsumer', 'KafkaProducer', 'KafkaClient', 'BrokerConnection',
3233
'SimpleClient', 'SimpleProducer', 'KeyedProducer',
3334
'RoundRobinPartitioner', 'HashedPartitioner',
3435
'create_message', 'create_gzip_message', 'create_snappy_message',

kafka/partitioner/default.py

Lines changed: 23 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,23 @@
1+
import random
2+
3+
from .hashed import murmur2
4+
5+
6+
class DefaultPartitioner(object):
    """Default partitioner.

    Keyed messages are mapped to a partition via murmur2 hashing of the
    key (same algorithm as the official java client). Keyless messages
    (key is None) get a random partition, preferring the currently
    available ones and falling back to the full partition list when
    none are available.
    """
    @classmethod
    def __call__(cls, key, all_partitions, available):
        # No key: spread messages randomly. Prefer partitions that are
        # currently available; otherwise pick from all partitions.
        if key is None:
            candidates = available if available else all_partitions
            return random.choice(candidates)

        # Keyed: murmur2-hash the key, clear the sign bit so the index
        # is non-negative, then map onto the partition list by modulo.
        hashed = murmur2(key) & 0x7fffffff
        return all_partitions[hashed % len(all_partitions)]

kafka/producer/__init__.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
1+
from .kafka import KafkaProducer
12
from .simple import SimpleProducer
23
from .keyed import KeyedProducer
34

45
__all__ = [
5-
'SimpleProducer', 'KeyedProducer'
6+
'KafkaProducer',
7+
'SimpleProducer', 'KeyedProducer' # deprecated
68
]

0 commit comments

Comments
 (0)