Commit c33a0cc

Merge pull request confluentinc#978 from confluentinc/prep152
Preps for v1.5.2 release
2 parents baee8a6 + 0b89441 commit c33a0cc


57 files changed: +130 -124 lines

.appveyor.yml

Lines changed: 2 additions & 1 deletion
@@ -1,6 +1,6 @@
 environment:
   global:
-    LIBRDKAFKA_NUGET_VERSION: 1.5.0
+    LIBRDKAFKA_NUGET_VERSION: 1.5.2
     CIBW_SKIP: cp33-* cp34-*
     CIBW_TEST_REQUIRES: pytest pytest-timeout requests trivup
   # SDK v7.0 MSVC Express 2008's SetEnv.cmd script will fail if the
@@ -22,6 +22,7 @@ install:
   - SET PATH=%PYTHON%;%PYTHON%\\Scripts;%PATH%
   - python --version
   - python -m pip install -U pip
+  - python -m pip install -U -r tests/requirements.txt

 build_script:
   - tools/windows-build.bat

.travis.yml

Lines changed: 17 additions & 19 deletions
@@ -1,33 +1,33 @@
 env:
   global:
-    - LIBRDKAFKA_VERSION=v1.5.0
+    - LIBRDKAFKA_VERSION=v1.5.2
 jobs:
   include:
-    # Source package verification with Python 2.7
-    - os: linux
+    - name: "Source package verification with Python 2.7 (Linux)"
+      os: linux
       language: python
       dist: trusty
       python: "2.7"
       env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
-    # Source package verification with Python 3.6
-    - os: linux
+    - name: "Source package verification with Python 3.6 (Linux)"
+      os: linux
       language: python
       dist: trusty
       python: "3.6"
       env: LD_LIBRARY_PATH="$PWD/tmp-build/lib"
-    # Source package verification with Python 2.7
-    - os: osx
+    - name: "Source package verification with Python 2.7 (OSX)"
+      os: osx
       python: "2.7"
       env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" INTERPRETER_VERSION="2.7.17"
-    # Source package verification with Python 3.6
-    - os: osx
+    - name: "Source package verification with Python 3.6 (OSX) +docs"
+      os: osx
       python: "3.6"
       env: DYLD_LIBRARY_PATH="$PWD/tmp-build/lib" MK_DOCS="y" INTERPRETER_VERSION="3.6.5"
-    # cibuildwheel for osx
-    - os: osx
+    - name: "cibuildwheel (OSX)"
+      os: osx
       env: CIBW_BEFORE_BUILD="tools/bootstrap-librdkafka.sh --require-ssl ${LIBRDKAFKA_VERSION} tmp" CFLAGS="-Itmp/include" LDFLAGS="-Ltmp/lib" INTERPRETER_VERSION="2.7.17"
-    # cibuildwheel for manylinux
-    - os: linux
+    - name: "cibuildwheel (manylinux)"
+      os: linux
       dist: trusty
       env:
         - CIBW_BEFORE_BUILD="tools/prepare-cibuildwheel-linux.sh ${LIBRDKAFKA_VERSION}"
@@ -53,22 +53,20 @@ before_install:
 install:
   - tools/install-interceptors.sh
   - pip install -r tests/requirements.txt
+  - pip install tox
   - flake8
   - if [[ $MK_DOCS == y ]]; then pip install -r docs/requirements.txt; fi
   - if [[ -z $CIBW_BEFORE_BUILD ]]; then tools/bootstrap-librdkafka.sh --require-ssl ${LIBRDKAFKA_VERSION} tmp-build && pip install --global-option=build_ext --global-option="-Itmp-build/include/" --global-option="-Ltmp-build/lib" . .[avro]; fi

 # Build wheels
-script:
-  - if [[ -n $TRAVIS_TAG && -n $CIBW_BEFORE_BUILD ]]; then cibuildwheel --output-dir wheelhouse1 && tools/fixup-wheels.sh wheelhouse1 wheelhouse; fi
-
 # Make plugins available for tests
 # Execute tests if not CIBW_BEFORE_BUILD [osx, linux]
 # Execute integration tests if CIBW_BEFORE_BUILD
 # Build docs if MK_DOCS
-after_script:
+script:
+  - if [[ -n $TRAVIS_TAG && -n $CIBW_BEFORE_BUILD ]]; then cibuildwheel --output-dir wheelhouse1 && tools/fixup-wheels.sh wheelhouse1 wheelhouse; fi
   - ldd staging/libs/* || otool -L staging/libs/* || true
-  - if [[ -z $CIBW_BEFORE_BUILD && $TRAVIS_OS_NAME == "osx" ]]; then DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:staging/libs py.test --timeout=60 --ignore=tmp-build --import-mode append; fi
-  - if [[ -z $CIBW_BEFORE_BUILD && $TRAVIS_OS_NAME == "linux" ]]; then LD_LIBRARY_PATH=$LD_LIBRARY_PATH:staging/libs py.test --timeout=60 --ignore=tmp-build --import-mode append; fi
+  - [[ -n $CIBW_BEFORE_BUILD ]] || LD_LIBRARY_PATH=$LD_LIBRARY_PATH:staging/libs DYLD_LIBRARY_PATH=$DYLD_LIBRARY_PATH:staging/libs tox
   - if [[ -n $TRAVIS_TAG && $TRAVIS_OS_NAME == osx && -n $CIBW_BEFORE_BUILD ]]; then tools/test-wheel.sh wheelhouse; fi
   - if [[ $MK_DOCS == y ]]; then make docs; fi

CHANGELOG.md

Lines changed: 23 additions & 0 deletions (diff collapsed in this view)

MANIFEST.in

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 include README.md
 include LICENSE.txt
 include test-requirements.txt
-include confluent_kafka/src/*.[ch]
+include src/confluent_kafka/src/*.[ch]

docs/conf.py

Lines changed: 2 additions & 2 deletions
@@ -57,9 +57,9 @@
 # built documents.
 #
 # The short X.Y version.
-version = '1.5.0'
+version = '1.5.2'
 # The full version, including alpha/beta/rc tags.
-release = '1.5.0'
+release = version

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.

examples/docker/Dockerfile.alpine

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ FROM alpine:3.12

 COPY . /usr/src/confluent-kafka-python

-ENV LIBRDKAFKA_VERSION v1.5.0
+ENV LIBRDKAFKA_VERSION v1.5.2
 ENV KAFKACAT_VERSION master

setup.py

Lines changed: 4 additions & 3 deletions
@@ -6,7 +6,7 @@
 import platform

 work_dir = os.path.dirname(os.path.realpath(__file__))
-mod_dir = os.path.join(work_dir, 'confluent_kafka')
+mod_dir = os.path.join(work_dir, 'src', 'confluent_kafka')
 ext_dir = os.path.join(mod_dir, 'src')

 INSTALL_REQUIRES = [
@@ -64,13 +64,14 @@ def get_install_requirements(path):
 setup(name='confluent-kafka',
       # Make sure to bump CFL_VERSION* in confluent_kafka/src/confluent_kafka.h
       # and version and release in docs/conf.py.
-      version='1.5.0',
+      version='1.5.2',
       description='Confluent\'s Python client for Apache Kafka',
       author='Confluent Inc',
       author_email='[email protected]',
       url='https://github.com/confluentinc/confluent-kafka-python',
       ext_modules=[module],
-      packages=find_packages(exclude=("tests", "tests.*")),
+      packages=find_packages('src'),
+      package_dir={'': 'src'},
       data_files=[('', [os.path.join(work_dir, 'LICENSE.txt')])],
       install_requires=INSTALL_REQUIRES,
       extras_require={
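This is the move to a src layout. A general property of such layouts (not something this commit states): the package can no longer be imported by accident from the repository checkout, so tests import the installed build, compiled extension included. What the two new arguments do, per setuptools' documented behavior (illustrative sketch):

    from setuptools import find_packages

    # Discover packages under src/ instead of the repository root,
    # e.g. ['confluent_kafka', 'confluent_kafka.kafkatest', ...]
    packages = find_packages('src')

    # Map the root package namespace ('') to src/ so setuptools knows
    # where those packages live on disk.
    package_dir = {'': 'src'}

The old exclude=("tests", "tests.*") filter becomes unnecessary: tests/ does not live under src/, so discovery never sees it.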
4 files renamed without changes.

confluent_kafka/kafkatest/verifiable_client.py renamed to src/confluent_kafka/kafkatest/verifiable_client.py

Lines changed: 6 additions & 4 deletions
@@ -86,10 +86,12 @@ def set_config(conf, args):
         if n == 'partition.assignment.strategy':
             # Convert Java class name to config value.
             # "org.apache.kafka.clients.consumer.RangeAssignor" -> "range"
-            conf[n] = re.sub(r'org.apache.kafka.clients.consumer.(\w+)Assignor',
-                             lambda x: x.group(1).lower(), v)
-        else:
-            conf[n] = v
+            v = re.sub(r'org.apache.kafka.clients.consumer.(\w+)Assignor',
+                       lambda x: x.group(1).lower(), v)
+            if v == 'sticky':
+                v = 'cooperative-sticky'
+
+        conf[n] = v

     @staticmethod
     def read_config_file(path):
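With the rewrite, converted and unconverted values alike flow through the single conf[n] = v assignment, and the Java sticky assignor gains a mapping. The conversion in isolation (self-contained sketch of the same logic as the hunk above):

    import re

    def java_assignor_to_config_value(v):
        # "org.apache.kafka.clients.consumer.RangeAssignor" -> "range"
        v = re.sub(r'org.apache.kafka.clients.consumer.(\w+)Assignor',
                   lambda x: x.group(1).lower(), v)
        # librdkafka has no plain "sticky" strategy; the closest
        # equivalent it implements is "cooperative-sticky".
        if v == 'sticky':
            v = 'cooperative-sticky'
        return v

    assert java_assignor_to_config_value(
        'org.apache.kafka.clients.consumer.RangeAssignor') == 'range'
    assert java_assignor_to_config_value(
        'org.apache.kafka.clients.consumer.StickyAssignor') == 'cooperative-sticky'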
File renamed without changes.

confluent_kafka/src/Consumer.c renamed to src/confluent_kafka/src/Consumer.c

Lines changed: 2 additions & 6 deletions
@@ -63,12 +63,8 @@ static void Consumer_dealloc (Handle *self) {

         CallState_begin(self, &cs);

-        /* If application has not called c.close() then
-         * rd_kafka_destroy() will, and that might trigger
-         * callbacks to be called from consumer_close().
-         * This should probably be fixed in librdkafka,
-         * or the application. */
-        rd_kafka_destroy(self->rk);
+        rd_kafka_destroy_flags(self->rk,
+                               RD_KAFKA_DESTROY_F_NO_CONSUMER_CLOSE);

         CallState_end(self, &cs);
 }
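rd_kafka_destroy_flags() with RD_KAFKA_DESTROY_F_NO_CONSUMER_CLOSE tears the handle down without the implicit consumer close that plain rd_kafka_destroy() performs, so a Consumer deallocated before close() no longer has callbacks fired from inside the destructor. Seen from Python (hedged illustration with a hypothetical broker address; an explicit close() remains the right way to leave the group):

    from confluent_kafka import Consumer

    c = Consumer({'bootstrap.servers': 'localhost:9092',  # hypothetical
                  'group.id': 'example-group'})
    # Dropping the last reference without close() now destroys the handle
    # without running the consumer-close protocol, i.e. no rebalance or
    # commit callbacks out of the destructor. Still prefer:
    c.close()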
File renamed without changes.

confluent_kafka/src/Producer.c renamed to src/confluent_kafka/src/Producer.c

Lines changed: 1 addition & 1 deletion
@@ -228,7 +228,7 @@ Producer_produce0 (Handle *self,
 static PyObject *Producer_produce (Handle *self, PyObject *args,
                                    PyObject *kwargs) {
         const char *topic, *value = NULL, *key = NULL;
-        int value_len = 0, key_len = 0;
+        Py_ssize_t value_len = 0, key_len = 0;
         int partition = RD_KAFKA_PARTITION_UA;
         PyObject *headers = NULL, *dr_cb = NULL, *dr_cb2 = NULL;
         long long timestamp = 0;
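The widened lengths pair with the PY_SSIZE_T_CLEAN define added to confluent_kafka.h below: with that macro set, CPython's # format units in argument parsing produce Py_ssize_t lengths instead of int. The overflow this avoids, as a pure-Python sketch (hypothetical sizes):

    INT_MAX = 2**31 - 1
    payload_len = INT_MAX + 1  # one byte more than a 32-bit int can hold

    def as_c_int(n):
        # Simulate truncation to a signed 32-bit C int.
        n &= 0xFFFFFFFF
        return n - 2**32 if n >= 2**31 else n

    assert as_c_int(payload_len) == -(2**31)  # old `int` length wraps negative
    # Py_ssize_t is pointer-sized, so the same length survives on 64-bit builds.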

confluent_kafka/src/confluent_kafka.h renamed to src/confluent_kafka/src/confluent_kafka.h

Lines changed: 3 additions & 2 deletions
@@ -14,6 +14,7 @@
  * limitations under the License.
  */

+#define PY_SSIZE_T_CLEAN
 #include <Python.h>
 #include <structmember.h>
 #include <pythread.h>
@@ -41,8 +42,8 @@
  * 0xMMmmRRPP
  * MM=major, mm=minor, RR=revision, PP=patchlevel (not used)
  */
-#define CFL_VERSION     0x01050000
-#define CFL_VERSION_STR "1.5.0"
+#define CFL_VERSION     0x01050200
+#define CFL_VERSION_STR "1.5.2"

 /**
  * Minimum required librdkafka version. This is checked both during
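PY_SSIZE_T_CLEAN must be defined before Python.h is included to take effect, hence its placement at the top of the shared header; it is what makes the Py_ssize_t change in Producer.c above work. The version bump follows the 0xMMmmRRPP layout described in the comment; decoding the new value as a quick check (illustrative, not repository code):

    CFL_VERSION = 0x01050200

    major      = (CFL_VERSION >> 24) & 0xFF
    minor      = (CFL_VERSION >> 16) & 0xFF
    revision   = (CFL_VERSION >> 8) & 0xFF
    patchlevel = CFL_VERSION & 0xFF  # unused, stays 0

    assert (major, minor, revision, patchlevel) == (1, 5, 2, 0)
    assert '%d.%d.%d' % (major, minor, revision) == '1.5.2'  # == CFL_VERSION_STR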

tests/docker/.env

Lines changed: 6 additions & 5 deletions
@@ -1,10 +1,11 @@
 #!/usr/bin/env bash

-export DOCKER_SOURCE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
-export DOCKER_CONTEXT=$DOCKER_SOURCE/docker-compose.yaml
-export DOCKER_BIN=$DOCKER_SOURCE/bin
-export DOCKER_CONF=$DOCKER_SOURCE/conf
-export TLS=$DOCKER_CONF/tls
+export PY_DOCKER_SOURCE="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
+export PY_DOCKER_COMPOSE_FILE=$PY_DOCKER_SOURCE/docker-compose.yaml
+export PY_DOCKER_CONTEXT="python-test-$(uuidgen)"
+export PY_DOCKER_BIN=$PY_DOCKER_SOURCE/bin
+export PY_DOCKER_CONF=$PY_DOCKER_SOURCE/conf
+export TLS=$PY_DOCKER_CONF/tls

 export MY_BOOTSTRAP_SERVER_ENV=localhost:29092
 export MY_SCHEMA_REGISTRY_URL_ENV=http://$(hostname):8081

tests/docker/bin/certify.sh

Lines changed: 5 additions & 5 deletions
@@ -2,10 +2,10 @@

 set -eu

-DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
+PY_DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
 export PASS="abcdefgh"

-source ${DOCKER_BIN}/../.env
+source ${PY_DOCKER_BIN}/../.env

 mkdir -p ${TLS}

@@ -17,11 +17,11 @@ fi
 HOST=$(hostname -f)

 echo "Creating ca-cert..."
-${DOCKER_BIN}/gen-ssl-certs.sh ca ${TLS}/ca-cert ${HOST}
+${PY_DOCKER_BIN}/gen-ssl-certs.sh ca ${TLS}/ca-cert ${HOST}
 echo "Creating server cert..."
-${DOCKER_BIN}/gen-ssl-certs.sh -k server ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}
+${PY_DOCKER_BIN}/gen-ssl-certs.sh -k server ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}
 echo "Creating client cert..."
-${DOCKER_BIN}/gen-ssl-certs.sh client ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}
+${PY_DOCKER_BIN}/gen-ssl-certs.sh client ${TLS}/ca-cert ${TLS}/ ${HOST} ${HOST}

 echo "Creating key ..."
 openssl rsa -in ${TLS}/client.key -out ${TLS}/client.key -passin pass:${PASS}

tests/docker/bin/cluster_down.sh

Lines changed: 3 additions & 3 deletions
@@ -2,8 +2,8 @@

 set -eu

-DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
-source ${DOCKER_BIN}/../.env
+PY_DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
+source ${PY_DOCKER_BIN}/../.env

 echo "Destroying cluster.."
-docker-compose -f ${DOCKER_CONTEXT} down -v --remove-orphans
+docker-compose -f $PY_DOCKER_COMPOSE_FILE down -v --remove-orphans

tests/docker/bin/cluster_up.sh

Lines changed: 7 additions & 6 deletions
@@ -2,8 +2,8 @@

 set -eu

-DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
-source ${DOCKER_BIN}/../.env
+PY_DOCKER_BIN="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
+source ${PY_DOCKER_BIN}/../.env

 # Wait for http service listener to come up and start serving
 # $1 http service name
@@ -27,20 +27,21 @@ await_http() {
 }

 echo "Configuring Environment..."
-source ${DOCKER_SOURCE}/.env
+source ${PY_DOCKER_SOURCE}/.env

 echo "Generating SSL certs..."
-${DOCKER_BIN}/certify.sh
+${PY_DOCKER_BIN}/certify.sh

 echo "Deploying cluster..."
-docker-compose -f ${DOCKER_CONTEXT} up -d
+docker-compose -f $PY_DOCKER_COMPOSE_FILE up -d

 echo "Setting throttle for throttle test..."
-docker-compose -f ${DOCKER_CONTEXT} exec kafka sh -c "
+docker-compose -f $PY_DOCKER_COMPOSE_FILE exec kafka sh -c "
 /usr/bin/kafka-configs --zookeeper zookeeper:2181 \
 --alter --add-config 'producer_byte_rate=1,consumer_byte_rate=1,request_percentage=001' \
 --entity-name throttled_client --entity-type clients"

 await_http "schema-registry" "http://localhost:8081"
+
 await_http "schema-registry-basic-auth" "http://localhost:8083"

tests/integration/conftest.py

Lines changed: 1 addition & 0 deletions
@@ -28,6 +28,7 @@
 def kafka_cluster():

     cluster = TrivupFixture({'with_sr': True,
+                             'cp_version': 'latest',
                              'broker_conf': ['transaction.state.log.replication.factor=1',
                                              'transaction.state.log.min.isr=1']})
     try:

tests/integration/consumer/test_consumer_error.py

Lines changed: 4 additions & 2 deletions
@@ -17,7 +17,7 @@
 #

 import pytest
-from confluent_kafka.cimpl import TopicPartition, OFFSET_END
+from confluent_kafka import TopicPartition, OFFSET_END, KafkaError

 from confluent_kafka.error import ConsumeError
 from confluent_kafka.serialization import StringSerializer
@@ -39,6 +39,8 @@ def test_consume_error(kafka_cluster):
                            value_deserializer=StringSerializer())
     consumer.assign([TopicPartition(topic, 0, OFFSET_END)])

-    with pytest.raises(ConsumeError, match="No more messages"):
+    with pytest.raises(ConsumeError) as exc_info:
         # Trigger EOF error
         consumer.poll()
+    assert exc_info.value.args[0].code() == KafkaError._PARTITION_EOF, \
+        "Expected _PARTITION_EOF, not {}".format(exc_info)

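The test now asserts on the stable error code rather than matching librdkafka's human-readable message, which can change between releases. The pattern in isolation (sketch; assumes a consumer whose poll() raises ConsumeError at partition EOF, as the deserializing consumer in this test does):

    import pytest
    from confluent_kafka import KafkaError
    from confluent_kafka.error import ConsumeError

    def assert_partition_eof(consumer):
        with pytest.raises(ConsumeError) as exc_info:
            consumer.poll()
        # Compare error codes, not message text, for forward compatibility.
        assert exc_info.value.args[0].code() == KafkaError._PARTITION_EOF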