kafka SourceRecord source code

  • 2022-10-20

kafka SourceRecord code

File path: /connect/api/src/main/java/org/apache/kafka/connect/source/SourceRecord.java

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kafka.connect.source;

import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.header.Header;

import java.util.Map;
import java.util.Objects;

/**
 * <p>
 * SourceRecords are generated by SourceTasks and passed to Kafka Connect for storage in
 * Kafka. In addition to the standard fields in {@link ConnectRecord} which specify where data is stored
 * in Kafka, they also include a sourcePartition and sourceOffset.
 * </p>
 * <p>
 * The sourcePartition represents a single input sourcePartition that the record came from (e.g. a filename, table
 * name, or topic-partition). The sourceOffset represents a position in that sourcePartition which can be used
 * to resume consumption of data.
 * </p>
 * <p>
 * These values can have arbitrary structure and should be represented using
 * org.apache.kafka.connect.data objects (or primitive values). For example, a database connector
 * might specify the sourcePartition as a record containing { "db": "database_name", "table":
 * "table_name"} and the sourceOffset as a Long containing the timestamp of the row.
 * </p>
 */
public class SourceRecord extends ConnectRecord<SourceRecord> {
    private final Map<String, ?> sourcePartition;
    private final Map<String, ?> sourceOffset;

    public SourceRecord(Map<String, ?> sourcePartition, Map<String, ?> sourceOffset,
                        String topic, Integer partition, Schema valueSchema, Object value) {
        this(sourcePartition, sourceOffset, topic, partition, null, null, valueSchema, value);
    }

    public SourceRecord(Map<String, ?> sourcePartition, Map<String, ?> sourceOffset,
                        String topic, Schema valueSchema, Object value) {
        this(sourcePartition, sourceOffset, topic, null, null, null, valueSchema, value);
    }

    public SourceRecord(Map<String, ?> sourcePartition, Map<String, ?> sourceOffset,
                        String topic, Schema keySchema, Object key, Schema valueSchema, Object value) {
        this(sourcePartition, sourceOffset, topic, null, keySchema, key, valueSchema, value);
    }

    public SourceRecord(Map<String, ?> sourcePartition, Map<String, ?> sourceOffset,
                        String topic, Integer partition,
                        Schema keySchema, Object key, Schema valueSchema, Object value) {
        this(sourcePartition, sourceOffset, topic, partition, keySchema, key, valueSchema, value, null);
    }

    public SourceRecord(Map<String, ?> sourcePartition, Map<String, ?> sourceOffset,
                        String topic, Integer partition,
                        Schema keySchema, Object key,
                        Schema valueSchema, Object value,
                        Long timestamp) {
        this(sourcePartition, sourceOffset, topic, partition, keySchema, key, valueSchema, value, timestamp, null);
    }

    public SourceRecord(Map<String, ?> sourcePartition, Map<String, ?> sourceOffset,
                        String topic, Integer partition,
                        Schema keySchema, Object key,
                        Schema valueSchema, Object value,
                        Long timestamp, Iterable<Header> headers) {
        super(topic, partition, keySchema, key, valueSchema, value, timestamp, headers);
        this.sourcePartition = sourcePartition;
        this.sourceOffset = sourceOffset;
    }

    public Map<String, ?> sourcePartition() {
        return sourcePartition;
    }

    public Map<String, ?> sourceOffset() {
        return sourceOffset;
    }

    @Override
    public SourceRecord newRecord(String topic, Integer kafkaPartition, Schema keySchema, Object key, Schema valueSchema, Object value, Long timestamp) {
        return newRecord(topic, kafkaPartition, keySchema, key, valueSchema, value, timestamp, headers().duplicate());
    }

    @Override
    public SourceRecord newRecord(String topic, Integer kafkaPartition, Schema keySchema, Object key, Schema valueSchema, Object value,
                                  Long timestamp, Iterable<Header> headers) {
        return new SourceRecord(sourcePartition, sourceOffset, topic, kafkaPartition, keySchema, key, valueSchema, value, timestamp, headers);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o)
            return true;
        if (o == null || getClass() != o.getClass())
            return false;
        if (!super.equals(o))
            return false;

        SourceRecord that = (SourceRecord) o;

        return Objects.equals(sourcePartition, that.sourcePartition) &&
                Objects.equals(sourceOffset, that.sourceOffset);
    }

    @Override
    public int hashCode() {
        int result = super.hashCode();
        result = 31 * result + (sourcePartition != null ? sourcePartition.hashCode() : 0);
        result = 31 * result + (sourceOffset != null ? sourceOffset.hashCode() : 0);
        return result;
    }

    @Override
    public String toString() {
        return "SourceRecord{" +
                "sourcePartition=" + sourcePartition +
                ", sourceOffset=" + sourceOffset +
                "} " + super.toString();
    }
}
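To make the class Javadoc concrete, the following is a minimal sketch (not taken from the Kafka code base) of how a hypothetical source task might build a SourceRecord for a database row: the sourcePartition map identifies the input table and the sourceOffset map records the position used to resume after a restart. The class name, topic name, key, and the "db"/"table"/"timestamp" map keys are all illustrative assumptions; only the SourceRecord constructor and Schema.STRING_SCHEMA come from the listing above.

// Hypothetical usage sketch; names and map keys are placeholders chosen
// to match the database-connector example in the SourceRecord Javadoc.
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.source.SourceRecord;

import java.util.Map;

public class ExampleRecordBuilder {
    public static SourceRecord buildRecord(String row, long rowTimestamp) {
        // Identifies which input this record came from; connectors choose
        // their own structure for these keys.
        Map<String, ?> sourcePartition = Map.of("db", "database_name", "table", "table_name");
        // Position within that source partition, stored by Connect so the
        // task can resume from here after a restart.
        Map<String, ?> sourceOffset = Map.of("timestamp", rowTimestamp);

        // Uses the constructor that takes topic, partition, key/value
        // schemas and values, and a record timestamp.
        return new SourceRecord(
                sourcePartition, sourceOffset,
                "example-topic",                // destination Kafka topic (placeholder)
                null,                           // null partition: let Kafka assign one
                Schema.STRING_SCHEMA, "row-key",
                Schema.STRING_SCHEMA, row,
                rowTimestamp);
    }
}

A SourceTask's poll() implementation would typically return a list of records built this way; Kafka Connect then writes them to the destination topic and periodically commits the sourceOffset values so the task can pick up where it left off.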

Related information

kafka source code directory

Related articles

kafka ConnectorTransactionBoundaries source code

kafka ExactlyOnceSupport source code

kafka SourceConnector source code

kafka SourceConnectorContext source code

kafka SourceTask source code

kafka SourceTaskContext source code

kafka TransactionContext source code
