fix(3.0.0): ExtendableObject update property name pattern
Pakisan committed Feb 15, 2024
1 parent e836aac commit 32dd46d
Showing 3 changed files with 15 additions and 15 deletions.
@@ -31,7 +31,7 @@
@JsonIgnoreProperties({"extensionFields"})
public class ExtendableObject {

-private static final Pattern extensionPropertyNamePattern = Pattern.compile("^x-[\\w\\d\\-\\_]+$");
+private static final Pattern extensionPropertyNamePattern = Pattern.compile("^x-[\\w.\\x2d_]+$");

/**
* Extension fields in the form x-extension-field-name for the exposed API.
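The change above widens the extension-property name pattern so that dotted names such as `x-key.subject.name.strategy` validate alongside the hyphenated form. A minimal, standalone sketch of the difference between the two regexes (the wrapper class and its main method are illustrative only, not part of the commit):

import java.util.regex.Pattern;

public class ExtensionNamePatternCheck {

    // Pattern before this commit: word characters, digits, hyphens and underscores after the "x-" prefix.
    private static final Pattern OLD_PATTERN = Pattern.compile("^x-[\\w\\d\\-\\_]+$");

    // Pattern after this commit: word characters, literal dots, hyphens (\x2d) and underscores.
    private static final Pattern NEW_PATTERN = Pattern.compile("^x-[\\w.\\x2d_]+$");

    public static void main(String[] args) {
        String hyphenated = "x-key-subject-name-strategy";
        String dotted = "x-key.subject.name.strategy";

        System.out.println(OLD_PATTERN.matcher(hyphenated).matches()); // true
        System.out.println(OLD_PATTERN.matcher(dotted).matches());     // false: dots were rejected
        System.out.println(NEW_PATTERN.matcher(hyphenated).matches()); // true: hyphens still accepted
        System.out.println(NEW_PATTERN.matcher(dotted).matches());     // true: dots now accepted
    }
}

This is why the Kotlin test fixtures and the YAML example in the following hunks switch their extension keys from hyphenated to dotted names.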
@@ -93,11 +93,11 @@ class AdeoKafkaRequestReplyAsyncAPI: AbstractExampleValidationTest() {
override fun expectedChannels(): Map<String, Any> {
val costingResponseChannelKafkaBinding = KafkaChannelBinding()
costingResponseChannelKafkaBinding.extensionFields = mapOf(
Pair("x-key-subject-name-strategy", mapOf(
Pair("x-key.subject.name.strategy", mapOf(
Pair("type", "string"),
Pair("description", "We use the RecordNameStrategy to infer the messages schema. Use `key.subject.name.strategy=io.confluent.kafka.serializers.subject.RecordNameStrategy` in your consumer configuration.\n"),
)),
Pair("x-value-subject-name-strategy", mapOf(
Pair("x-value.subject.name.strategy", mapOf(
Pair("type", "string"),
Pair("description", "We use the RecordNameStrategy to infer the messages schema. Use `value.subject.name.strategy=io.confluent.kafka.serializers.subject.RecordNameStrategy` in your consumer configuration.\n"),
))
@@ -175,7 +175,7 @@ class AdeoKafkaRequestReplyAsyncAPI: AbstractExampleValidationTest() {
)
.build()
receiveACostingRequestKafkaBinding.extensionFields = mapOf(
Pair("x-value-subject-name-strategy", mapOf(
Pair("x-value.subject.name.strategy", mapOf(
Pair("type", "string"),
Pair("description", "We use the RecordNameStrategy to infer the messages schema. Use `value.subject.name.strategy=io.confluent.kafka.serializers.subject.RecordNameStrategy` in your producer configuration.\n"
),
@@ -213,11 +213,11 @@ class AdeoKafkaRequestReplyAsyncAPI: AbstractExampleValidationTest() {
"to connect to the ADEO Broker."
)
saslSslSecurityScheme.extensionFields = mapOf(
Pair("x-sasl-jaas-config", "org.apache.kafka.common.security.plain.PlainLoginModule required " +
Pair("x-sasl.jaas.config", "org.apache.kafka.common.security.plain.PlainLoginModule required " +
"username=\"<CLUSTER_API_KEY>\" password=\"<CLUSTER_API_SECRET>\";"),
Pair("x-security-protocol", "SASL_SSL"),
Pair("x-ssl-endpoint-identification-algorithm", "https"),
Pair("x-sasl-mechanism", "PLAIN")
Pair("x-security.protocol", "SASL_SSL"),
Pair("x-ssl.endpoint.identification.algorithm", "https"),
Pair("x-sasl.mechanism", "PLAIN")
)

return Components.builder()
@@ -87,13 +87,13 @@ channels:
bindings:
kafka:
-x-key-subject-name-strategy:
+x-key.subject.name.strategy:
type: string
description: >
We use the RecordNameStrategy to infer the messages schema. Use
`key.subject.name.strategy=io.confluent.kafka.serializers.subject.RecordNameStrategy`
in your consumer configuration.
-x-value-subject-name-strategy:
+x-value.subject.name.strategy:
type: string
description: >
We use the RecordNameStrategy to infer the messages schema. Use
@@ -128,7 +128,7 @@ operations:
description: >
The groupId must be prefixed by your `svc` account, deliver by the
Adeo Kafka team. This `svc` must have the write access to the topic.
-x-value-subject-name-strategy:
+x-value.subject.name.strategy:
type: string
description: >
We use the RecordNameStrategy to infer the messages schema. Use
@@ -286,12 +286,12 @@ components:
securitySchemes:
sasl-ssl:
type: plain
-x-sasl-jaas-config: >-
+x-sasl.jaas.config: >-
org.apache.kafka.common.security.plain.PlainLoginModule required
username="<CLUSTER_API_KEY>" password="<CLUSTER_API_SECRET>";
-x-security-protocol: SASL_SSL
-x-ssl-endpoint-identification-algorithm: https
-x-sasl-mechanism: PLAIN
+x-security.protocol: SASL_SSL
+x-ssl.endpoint.identification.algorithm: https
+x-sasl.mechanism: PLAIN
description: >
Use [SASL authentication with SSL
encryption](https://docs.confluent.io/platform/current/security/security_tutorial.html#configure-clients)
