
Comparing changes

base repository: symfony/var-dumper
base: v5.2.3
head repository: symfony/var-dumper
compare: v5.2.4
  • 3 commits
  • 1 file changed
  • 1 contributor

Commits on Feb 18, 2021

  1. 92f1856
  2. 2db5e07 (Verified: this commit was signed with the committer's verified signature, targos Michaël Zasso)
  3. 6a81fec (Verified: this commit was signed with the committer's verified signature, targos Michaël Zasso)
Showing with 2 additions and 68 deletions.
  1. +2 −68 Tests/Caster/RdKafkaCasterTest.php
@@ -56,75 +56,9 @@ public function testDumpConf()
 
         $expectedDump = <<<EODUMP
 RdKafka\Conf {
-  builtin.features: "gzip,snappy,ssl,sasl,regex,lz4,sasl_gssapi,sasl_plain,sasl_scram,plugins"
+  builtin.features: "gzip,snappy,ssl,sasl,regex,lz4,sasl_gssapi,sasl_plain,sasl_scram,plugins%S"
   client.id: "rdkafka"
   message.max.bytes: "1000000"
-  message.copy.max.bytes: "65535"
-  receive.message.max.bytes: "100000000"
-  max.in.flight.requests.per.connection: "1000000"
-  metadata.request.timeout.ms: "60000"
-  topic.metadata.refresh.interval.ms: "300000"
-  metadata.max.age.ms: "-1"
-  topic.metadata.refresh.fast.interval.ms: "250"
-  topic.metadata.refresh.fast.cnt: "10"
-  topic.metadata.refresh.sparse: "true"
-  debug: ""
-  socket.timeout.ms: "60000"
-  socket.blocking.max.ms: "1000"
-  socket.send.buffer.bytes: "0"
-  socket.receive.buffer.bytes: "0"
-  socket.keepalive.enable: "false"
-  socket.nagle.disable: "false"
-  socket.max.fails: "%d"
-  broker.address.ttl: "1000"
-  broker.address.family: "any"
-  reconnect.backoff.jitter.ms: "500"
-  statistics.interval.ms: "0"
-  enabled_events: "0"
-  error_cb: "0x%x"
 %A
-  log_level: "6"
-  log.queue: "%s"
-  log.thread.name: "true"
-  log.connection.close: "true"
-  socket_cb: "0x%x"
-  open_cb: "0x%x"
-  internal.termination.signal: "0"
-  api.version.request: "true"
-  api.version.request.timeout.ms: "10000"
-  api.version.fallback.ms: "1200000"
-  broker.version.fallback: "0.9.0"
-  security.protocol: "plaintext"
-  sasl.mechanisms: "GSSAPI"
-  sasl.kerberos.service.name: "kafka"
-  sasl.kerberos.principal: "kafkaclient"
-  sasl.kerberos.kinit.cmd: "kinit -S "%{sasl.kerberos.service.name}/%{broker.name}" -k -t "%{sasl.kerberos.keytab}" %{sasl.kerberos.principal}"
-  sasl.kerberos.min.time.before.relogin: "60000"
-  partition.assignment.strategy: "range,roundrobin"
-  session.timeout.ms: "30000"
-  heartbeat.interval.ms: "1000"
-  group.protocol.type: "consumer"
-  coordinator.query.interval.ms: "600000"
-  enable.auto.commit: "true"
-  auto.commit.interval.ms: "5000"
-  enable.auto.offset.store: "true"
-  queued.min.messages: "100000"
-  queued.max.messages.kbytes: "1048576"
-  fetch.wait.max.ms: "100"
-%A
-  fetch.min.bytes: "1"
-  fetch.error.backoff.ms: "500"
-  offset.store.method: "broker"
-%A
-  enable.partition.eof: "true"
-  check.crcs: "false"
-  queue.buffering.max.messages: "100000"
-  queue.buffering.max.kbytes: "1048576"
-  queue.buffering.max.ms: "0"
-%A
-  compression.codec: "none"
-  batch.num.messages: "10000"
-  delivery.report.only.error: "false"
-  dr_msg_cb: "0x%x"
 }
 EODUMP;
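
What this hunk changes, in plain terms: the heredoc is not compared literally. Symfony's VarDumperTestTrait::assertDumpMatchesFormat() delegates to PHPUnit's assertStringMatchesFormat(), so %S (zero or more characters on one line), %A (zero or more lines), %d, %i, and %x are format placeholders. Appending %S to the builtin.features value and collapsing the version-dependent conf entries into a single %A lets the same assertion pass against newer librdkafka builds, which may report extra builtin features (for example a ",zstd" suffix) or ship different defaults. A minimal sketch of the pattern, assuming ext-rdkafka is installed as the test itself requires (class and method names here are illustrative, not from this commit):

<?php

use PHPUnit\Framework\TestCase;
use Symfony\Component\VarDumper\Test\VarDumperTestTrait;

class RdKafkaConfDumpSketchTest extends TestCase
{
    use VarDumperTestTrait;

    public function testConfDumpToleratesNewerLibrdkafka()
    {
        $conf = new \RdKafka\Conf();

        // %S tolerates extra features appended by newer librdkafka builds;
        // %A skips the conf entries whose presence or defaults vary by version.
        $expectedDump = <<<EODUMP
RdKafka\Conf {
  builtin.features: "gzip,snappy,ssl,sasl,regex,lz4,sasl_gssapi,sasl_plain,sasl_scram,plugins%S"
%A
}
EODUMP;

        $this->assertDumpMatchesFormat($expectedDump, $conf);
    }
}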
@@ -180,7 +114,7 @@ public function testDumpTopicConf()
 
         $expectedDump = <<<EODUMP
 RdKafka\TopicConf {
-  request.required.acks: "1"
+  request.required.acks: "%i"
   request.timeout.ms: "5000"
   message.timeout.ms: "300000"
 %A
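
The second hunk applies the same technique: %i matches any signed integer, so the expected dump no longer pins request.required.acks to a literal "1", a default that differs between librdkafka versions.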