mirror of https://github.com/nestjs/nest.git (synced 2026-02-21 23:11:44 +00:00)
KafkaParser options to keep binary payloads without utf8 encoding
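What the change enables, as a rough usage sketch rather than part of this diff: a Kafka microservice can now ask the transport to hand message values through as raw Buffers instead of utf8-decoding them, via a new parser.keepBinary option. The module name and broker address below are placeholders, and a Nest version where listen() returns a promise is assumed.

import { NestFactory } from '@nestjs/core';
import { MicroserviceOptions, Transport } from '@nestjs/microservices';
import { AppModule } from './app.module'; // placeholder application module

async function bootstrap() {
  const app = await NestFactory.createMicroservice<MicroserviceOptions>(AppModule, {
    transport: Transport.KAFKA,
    options: {
      client: { brokers: ['localhost:9092'] },
      // new option from this commit: keep message values as raw Buffers
      parser: { keepBinary: true },
    },
  });
  await app.listen();
}
bootstrap();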
package-lock.json (generated): 8 changed lines
@@ -6270,7 +6270,6 @@
       "resolved": "https://registry.npmjs.org/apollo-env/-/apollo-env-0.6.6.tgz",
       "integrity": "sha512-hXI9PjJtzmD34XviBU+4sPMOxnifYrHVmxpjykqI/dUD2G3yTiuRaiQqwRwB2RCdwC1Ug/jBfoQ/NHDTnnjndQ==",
       "dev": true,
-      "optional": true,
       "requires": {
         "@types/node-fetch": "2.5.7",
         "core-js": "^3.0.1",
@@ -11482,8 +11481,7 @@
       "version": "0.0.10",
       "resolved": "https://registry.npmjs.org/cssfilter/-/cssfilter-0.0.10.tgz",
       "integrity": "sha1-xtJnJjKi5cg+AT5oZKQs6N79IK4=",
-      "dev": true,
-      "optional": true
+      "dev": true
     },
     "csv-parse": {
       "version": "4.15.3",
@@ -27111,7 +27109,6 @@
       "resolved": "https://registry.npmjs.org/xss/-/xss-1.0.8.tgz",
       "integrity": "sha512-3MgPdaXV8rfQ/pNn16Eio6VXYPTkqwa0vc7GkiymmY/DqR1SE/7VPAAVZz1GJsJFrllMYO3RHfEaiUGjab6TNw==",
       "dev": true,
-      "optional": true,
       "requires": {
         "commander": "^2.20.3",
         "cssfilter": "0.0.10"
@@ -27121,8 +27118,7 @@
           "version": "2.20.3",
           "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
           "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
-          "dev": true,
-          "optional": true
+          "dev": true
         }
       }
     },
ClientKafka:

@@ -40,10 +40,11 @@ import { ClientProxy } from './client-proxy';
 let kafkaPackage: any = {};
 
 export class ClientKafka extends ClientProxy {
+  protected logger = new Logger(ClientKafka.name);
   protected client: Kafka = null;
   protected consumer: Consumer = null;
   protected producer: Producer = null;
-  protected logger = new Logger(ClientKafka.name);
+  protected parser: KafkaParser = null;
   protected responsePatterns: string[] = [];
   protected consumerAssignments: { [key: string]: number } = {};
 
@@ -73,6 +74,8 @@ export class ClientKafka extends ClientProxy {
       require('kafkajs'),
     );
 
+    this.parser = new KafkaParser((options && options.parser) || undefined);
+
     this.initializeSerializer(options);
     this.initializeDeserializer(options);
   }
@@ -153,7 +156,7 @@ export class ClientKafka extends ClientProxy {
 
   public createResponseCallback(): (payload: EachMessagePayload) => any {
     return (payload: EachMessagePayload) => {
-      const rawMessage = KafkaParser.parse<KafkaMessage>(
+      const rawMessage = this.parser.parse<KafkaMessage>(
         Object.assign(payload.message, {
           topic: payload.topic,
           partition: payload.partition,
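Because ClientKafka now builds its own KafkaParser from options.parser, the behaviour is chosen per client instance. A hedged sketch of creating such a client with ClientProxyFactory; the clientId, groupId and broker address are made up:

import { ClientProxyFactory, Transport } from '@nestjs/microservices';

const client = ClientProxyFactory.create({
  transport: Transport.KAFKA,
  options: {
    client: { clientId: 'demo-client', brokers: ['localhost:9092'] },
    consumer: { groupId: 'demo-consumer' },
    // reply message values stay as Buffers for this client only
    parser: { keepBinary: true },
  },
});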
KafkaParser:

@@ -1,8 +1,17 @@
 import { isNil } from '@nestjs/common/utils/shared.utils';
+import { KafkaParserConfig } from '../interfaces';
 
 export class KafkaParser {
-  public static parse<T = any>(data: any): T {
-    data.value = this.decode(data.value);
+  protected readonly keepBinary: boolean;
+
+  constructor(config?: KafkaParserConfig) {
+    this.keepBinary = (config && config.keepBinary) || false;
+  }
+
+  public parse<T = any>(data: any): T {
+    if (!this.keepBinary) {
+      data.value = this.decode(data.value);
+    }
 
     if (!isNil(data.key)) {
       data.key = this.decode(data.key);
@@ -18,7 +27,7 @@ export class KafkaParser {
     return data;
   }
 
-  public static decode(value: Buffer): object | string | null {
+  public decode(value: Buffer): object | string | null {
     if (isNil(value)) {
       return null;
     }
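A small illustrative sketch of the new instance-based API, not taken from the repository: the message object is hand-built rather than a real kafkajs payload, and the deep import path is an assumption.

import { KafkaParser } from '@nestjs/microservices/helpers/kafka-parser'; // assumed export path

const textParser = new KafkaParser();                      // keepBinary defaults to false
const binaryParser = new KafkaParser({ keepBinary: true });

// hand-built object shaped like what ClientKafka/ServerKafka assemble from a payload
const makeMessage = () => ({
  topic: 'demo.topic',
  partition: 0,
  key: Buffer.from('key-1'),
  value: Buffer.from([0x00, 0x01, 0x02]),
});

const decoded = textParser.parse(makeMessage());  // value decoded, no longer a Buffer
const raw = binaryParser.parse(makeMessage());    // value left untouched as a Buffer
console.log(Buffer.isBuffer(decoded.value), Buffer.isBuffer(raw.value)); // false true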
Kafka options interfaces:

@@ -157,6 +157,10 @@ export interface RmqOptions {
   };
 }
 
+export interface KafkaParserConfig {
+  keepBinary?: boolean;
+}
+
 export interface KafkaOptions {
   transport?: Transport.KAFKA;
   options?: {
@@ -169,5 +173,6 @@ export interface KafkaOptions {
     send?: Omit<ProducerRecord, 'topic' | 'messages'>;
     serializer?: Serializer;
     deserializer?: Deserializer;
+    parser?: KafkaParserConfig;
   };
 }
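With these interface additions a typed options object can carry the parser config directly; a minimal sketch, assuming KafkaOptions is imported from the package root and using a placeholder broker address:

import { KafkaOptions, Transport } from '@nestjs/microservices';

const kafkaOptions: KafkaOptions = {
  transport: Transport.KAFKA,
  options: {
    client: { brokers: ['localhost:9092'] },
    parser: { keepBinary: true }, // typed by the new KafkaParserConfig
  },
};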
ServerKafka:

@@ -39,6 +39,7 @@ export class ServerKafka extends Server implements CustomTransportStrategy {
   protected client: Kafka = null;
   protected consumer: Consumer = null;
   protected producer: Producer = null;
+  protected parser: KafkaParser = null;
 
   protected brokers: string[] | BrokersFunction;
   protected clientId: string;
@@ -66,6 +67,8 @@ export class ServerKafka extends Server implements CustomTransportStrategy {
       require('kafkajs'),
     );
 
+    this.parser = new KafkaParser((options && options.parser) || undefined);
+
     this.initializeSerializer(options);
     this.initializeDeserializer(options);
   }
@@ -137,7 +140,7 @@ export class ServerKafka extends Server implements CustomTransportStrategy {
 
   public async handleMessage(payload: EachMessagePayload) {
     const channel = payload.topic;
-    const rawMessage = KafkaParser.parse<KafkaMessage>(
+    const rawMessage = this.parser.parse<KafkaMessage>(
       Object.assign(payload.message, {
         topic: payload.topic,
        partition: payload.partition,
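On the server side, with keepBinary enabled and assuming the default deserializer forwards the undecoded value, a handler would see the raw Buffer. A hypothetical controller sketch; the topic pattern is made up:

import { Controller } from '@nestjs/common';
import { Ctx, KafkaContext, MessagePattern, Payload } from '@nestjs/microservices';

@Controller()
export class BinaryPayloadController {
  @MessagePattern('binary.topic')
  handleBinary(@Payload() value: Buffer, @Ctx() context: KafkaContext) {
    // with keepBinary the message value was never utf8-decoded
    return { receivedBytes: value.length, topic: context.getTopic() };
  }
}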