Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[compaction] Refactor the file scan source of combine mode to support unaware logic #3179

Closed
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.paimon.append;

import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.data.BinaryRow;
import org.apache.paimon.io.DataFileMeta;

import java.util.List;
import java.util.Objects;

/** Compaction task for a single table within a multi-table (combined mode) compaction job. */
public class MultiTableAppendOnlyCompactionTask extends AppendOnlyCompactionTask {

    /** Identifies the table whose files this task compacts. */
    private final Identifier tableIdentifier;

    public MultiTableAppendOnlyCompactionTask(
            BinaryRow partition, List<DataFileMeta> files, Identifier identifier) {
        super(partition, files);
        this.tableIdentifier = identifier;
    }

    public Identifier tableIdentifier() {
        return tableIdentifier;
    }

    // @Override added for symmetry with equals(); contract: equal tasks hash equally.
    @Override
    public int hashCode() {
        return Objects.hash(partition(), compactBefore(), compactAfter(), tableIdentifier);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }

        MultiTableAppendOnlyCompactionTask that = (MultiTableAppendOnlyCompactionTask) o;
        return Objects.equals(partition(), that.partition())
                && Objects.equals(compactBefore(), that.compactBefore())
                && Objects.equals(compactAfter(), that.compactAfter())
                && Objects.equals(tableIdentifier, that.tableIdentifier);
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
package org.apache.paimon.catalog;

import org.apache.paimon.annotation.Public;
import org.apache.paimon.types.DataTypes;
import org.apache.paimon.types.RowType;
import org.apache.paimon.utils.StringUtils;

import java.io.Serializable;
Expand Down Expand Up @@ -109,4 +111,8 @@ public int hashCode() {
// Human-readable form for logs/errors, e.g. Identifier{database='db', table='t'}.
public String toString() {
return "Identifier{" + "database='" + database + '\'' + ", table='" + table + '\'' + '}';
}

// Row schema used to (de)serialize an Identifier: two STRING columns holding the
// database name and the object (table) name — see IdentifierSerializer.
public static RowType schema() {
return RowType.builder().fields(DataTypes.STRING(), DataTypes.STRING()).build();
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.paimon.io;

import org.apache.paimon.catalog.Identifier;
import org.apache.paimon.data.BinaryString;
import org.apache.paimon.data.GenericRow;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.utils.ObjectSerializer;

/** Serializer for {@link Identifier}, mapping it to/from a two-column (database, table) row. */
public class IdentifierSerializer extends ObjectSerializer<Identifier> {

    public IdentifierSerializer() {
        super(Identifier.schema());
    }

    @Override
    public InternalRow toRow(Identifier record) {
        BinaryString database = BinaryString.fromString(record.getDatabaseName());
        BinaryString table = BinaryString.fromString(record.getObjectName());
        return GenericRow.of(database, table);
    }

    @Override
    public Identifier fromRow(InternalRow row) {
        return Identifier.create(stringAt(row, 0), stringAt(row, 1));
    }

    // Reads a nullable string column, returning null when the field is null.
    private static String stringAt(InternalRow row, int pos) {
        return row.isNullAt(pos) ? null : row.getString(pos).toString();
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,117 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.paimon.table.sink;

import org.apache.paimon.append.MultiTableAppendOnlyCompactionTask;
import org.apache.paimon.data.serializer.VersionedSerializer;
import org.apache.paimon.io.DataFileMetaSerializer;
import org.apache.paimon.io.DataInputDeserializer;
import org.apache.paimon.io.DataInputView;
import org.apache.paimon.io.DataOutputView;
import org.apache.paimon.io.DataOutputViewStreamWrapper;
import org.apache.paimon.io.IdentifierSerializer;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import static org.apache.paimon.utils.SerializationUtils.deserializeBinaryRow;
import static org.apache.paimon.utils.SerializationUtils.serializeBinaryRow;

/**
 * Serializer for {@link MultiTableAppendOnlyCompactionTask}. Wire layout per task: binary
 * partition row, list of data files to compact, then the table identifier.
 */
public class MultiTableCompactionTaskSerializer
        implements VersionedSerializer<MultiTableAppendOnlyCompactionTask> {

    private static final int CURRENT_VERSION = 1;

    private final DataFileMetaSerializer dataFileSerializer = new DataFileMetaSerializer();

    private final IdentifierSerializer identifierSerializer = new IdentifierSerializer();

    @Override
    public int getVersion() {
        return CURRENT_VERSION;
    }

    @Override
    public byte[] serialize(MultiTableAppendOnlyCompactionTask task) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        serialize(task, new DataOutputViewStreamWrapper(bytes));
        return bytes.toByteArray();
    }

    @Override
    public MultiTableAppendOnlyCompactionTask deserialize(int version, byte[] serialized)
            throws IOException {
        checkVersion(version);
        return deserialize(new DataInputDeserializer(serialized));
    }

    /** Writes the list length followed by each task, so {@link #deserializeList} can read it back. */
    public void serializeList(List<MultiTableAppendOnlyCompactionTask> list, DataOutputView view)
            throws IOException {
        view.writeInt(list.size());
        for (MultiTableAppendOnlyCompactionTask task : list) {
            serialize(task, view);
        }
    }

    /** Reads a length-prefixed list of tasks written by {@link #serializeList}. */
    public List<MultiTableAppendOnlyCompactionTask> deserializeList(int version, DataInputView view)
            throws IOException {
        checkVersion(version);
        int size = view.readInt();
        List<MultiTableAppendOnlyCompactionTask> tasks = new ArrayList<>(size);
        for (int remaining = size; remaining > 0; remaining--) {
            tasks.add(deserialize(view));
        }
        return tasks;
    }

    private void serialize(MultiTableAppendOnlyCompactionTask task, DataOutputView view)
            throws IOException {
        serializeBinaryRow(task.partition(), view);
        dataFileSerializer.serializeList(task.compactBefore(), view);
        identifierSerializer.serialize(task.tableIdentifier(), view);
    }

    private MultiTableAppendOnlyCompactionTask deserialize(DataInputView view) throws IOException {
        // Field order must mirror serialize(task, view) exactly.
        return new MultiTableAppendOnlyCompactionTask(
                deserializeBinaryRow(view),
                dataFileSerializer.deserializeList(view),
                identifierSerializer.deserialize(view));
    }

    // Rejects payloads written by an incompatible serializer version.
    private void checkVersion(int version) {
        if (version != CURRENT_VERSION) {
            throw new UnsupportedOperationException(
                    "Expecting MultiTableCompactionTaskSerializer version to be "
                            + CURRENT_VERSION
                            + ", but found "
                            + version
                            + ".\nCompactionTask is not a compatible data structure. "
                            + "Please restart the job afresh (do not recover from savepoint).");
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -170,23 +170,34 @@ public void createTableDefault() throws Exception {
catalog.createTable(identifier(), schemaDefault(), true);
}

// Creates a table with the default schema under the given identifier
// (ignoreIfExists = false, so this fails if the table already exists).
public void createTable(Identifier identifier) throws Exception {
catalog.createTable(identifier, schemaDefault(), false);
}

/** Commits the given messages to the default table in one batch commit. */
protected void commitDefault(List<CommitMessage> messages) throws Exception {
    BatchTableCommit commit = getTableDefault().newBatchWriteBuilder().newCommit();
    try {
        commit.commit(messages);
    } finally {
        // Ensure the commit is closed even if commit() throws (previously leaked on failure).
        commit.close();
    }
}

// Convenience overload of writeData(Table, int, int) targeting the default table.
protected List<CommitMessage> writeDataDefault(int size, int times) throws Exception {
return writeData(getTableDefault(), size, times);
}

/**
 * Writes {@code times} rounds of data to the given table via {@code writeOnce} (presumably
 * {@code size} rows per round — confirm against writeOnce) and collects the commit messages.
 */
protected List<CommitMessage> writeData(Table table, int size, int times) throws Exception {
    List<CommitMessage> messages = new ArrayList<>();
    for (int i = 0; i < times; i++) {
        // Write to the table passed in, not the default table, so any table can be targeted.
        // (The flattened diff showed a duplicate writeOnce(getTableDefault(), ...) call — the
        // superseded pre-refactor line — which would have written every round twice; removed.)
        messages.addAll(writeOnce(table, i, size));
    }

    return messages;
}

/** Returns the default table, delegating to {@link #getTable} with the default identifier. */
public FileStoreTable getTableDefault() throws Exception {
    // The flattened diff contained both the old direct-cast return and this delegating
    // return (unreachable second statement — not valid Java); only the new form is kept.
    return getTable(identifier());
}

// Looks the table up in the catalog and casts it to FileStoreTable
// (throws ClassCastException if the catalog returns another table kind).
public FileStoreTable getTable(Identifier identifier) throws Exception {
return (FileStoreTable) catalog.getTable(identifier);
}

private List<CommitMessage> writeOnce(Table table, int time, int size) throws Exception {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,8 @@
package org.apache.paimon.table.sink;

import org.apache.paimon.append.AppendOnlyCompactionTask;
import org.apache.paimon.append.MultiTableAppendOnlyCompactionTask;
import org.apache.paimon.catalog.Identifier;

import org.junit.jupiter.api.Test;

Expand All @@ -28,7 +30,7 @@
import static org.apache.paimon.mergetree.compact.MergeTreeCompactManagerTest.row;
import static org.assertj.core.api.Assertions.assertThat;

/** Tests for {@link CompactionTaskSerializer}. */
/** Tests for {@link CompactionTaskSerializer} and {@link MultiTableCompactionTaskSerializer}. */
public class CompactionTaskSerializerTest {

@Test
Expand All @@ -41,4 +43,19 @@ public void testCompactionTaskSerializer() throws IOException {
AppendOnlyCompactionTask task1 = serializer.deserialize(serializer.getVersion(), bytes);
assertThat(task).isEqualTo(task1);
}

@Test
public void testMultiTableCompactionTaskSerializer() throws IOException {
    // Round-trip a task through the serializer and check nothing is lost.
    MultiTableCompactionTaskSerializer serializer = new MultiTableCompactionTaskSerializer();
    Identifier tableId = Identifier.create("db", "table");
    MultiTableAppendOnlyCompactionTask original =
            new MultiTableAppendOnlyCompactionTask(
                    row(0), randomNewFilesIncrement().newFiles(), tableId);

    byte[] serialized = serializer.serialize(original);
    MultiTableAppendOnlyCompactionTask restored =
            serializer.deserialize(serializer.getVersion(), serialized);
    assertThat(original).isEqualTo(restored);
}
}