Replace jsqlparser with antlr grammar (#1298)
akrambek authored Oct 22, 2024
1 parent 5d1fc53 commit 9e34ab8
Showing 70 changed files with 9,647 additions and 765 deletions.
1 change: 0 additions & 1 deletion cloud/docker-image/src/main/docker/assembly.xml
@@ -66,7 +66,6 @@
<include>org/junit/**</include>
<include>com/google/**</include>
<include>org/checkerframework/**</include>
<include>com/github/jsqlparser/**</include>
</includes>
</fileSet>
</fileSets>
2 changes: 1 addition & 1 deletion incubator/binding-pgsql-kafka/NOTICE
@@ -10,5 +10,5 @@ WARRANTIES OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.

This project includes:
JSQLParser library under GNU Library or Lesser General Public License (LGPL) V2.1 or The Apache Software License, Version 2.0
zilla::incubator::binding-pgsql under Aklivity Community License Agreement

7 changes: 3 additions & 4 deletions incubator/binding-pgsql-kafka/pom.xml
@@ -24,7 +24,7 @@
</licenses>

<properties>
<jacoco.coverage.ratio>0.82</jacoco.coverage.ratio>
<jacoco.coverage.ratio>0.86</jacoco.coverage.ratio>
<jacoco.missed.count>0</jacoco.missed.count>
</properties>

@@ -42,8 +42,8 @@
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.github.jsqlparser</groupId>
<artifactId>jsqlparser</artifactId>
<groupId>io.aklivity.zilla</groupId>
<artifactId>binding-pgsql</artifactId>
</dependency>
<dependency>
<groupId>${project.groupId}</groupId>
@@ -195,7 +195,6 @@
<configuration>
<excludes>
<exclude>io/aklivity/zilla/runtime/binding/pgsql/kafka/internal/types/**/*.class</exclude>
<exclude>net/sf/jsqlparser/parser/*</exclude>
</excludes>
<rules>
<rule>
incubator/binding-pgsql-kafka/.../PgsqlKafkaKeyAvroSchemaTemplate.java
@@ -14,11 +14,9 @@
*/
package io.aklivity.zilla.runtime.binding.pgsql.kafka.internal.schema;

import java.util.List;
import java.util.Map;

import net.sf.jsqlparser.statement.create.table.ColumnDefinition;
import net.sf.jsqlparser.statement.create.table.CreateTable;
import net.sf.jsqlparser.statement.create.table.Index;
import io.aklivity.zilla.runtime.binding.pgsql.parser.model.TableInfo;

public class PgsqlKafkaKeyAvroSchemaTemplate extends PgsqlKafkaAvroSchemaTemplate
{
@@ -35,12 +33,12 @@ public PgsqlKafkaKeyAvroSchemaTemplate(

public String generateSchema(
String database,
CreateTable createTable)
TableInfo createTable)
{
schemaBuilder.setLength(0);

final String newNamespace = namespace.replace(DATABASE_PLACEHOLDER, database);
final String recordName = String.format("%s_key", createTable.getTable().getName());
final String recordName = String.format("%s_key", createTable.name());

schemaBuilder.append("{\n");
schemaBuilder.append("\"schemaType\": \"AVRO\",\n");
@@ -52,10 +50,10 @@ public String generateSchema(
schemaBuilder.append(" \\\"namespace\\\": \\\"").append(newNamespace).append("\\\",");
schemaBuilder.append(" \\\"fields\\\": [");

for (ColumnDefinition column : createTable.getColumnDefinitions())
for (Map.Entry<String, String> column : createTable.columns().entrySet())
{
String fieldName = column.getColumnName();
String pgsqlType = column.getColDataType().getDataType();
String fieldName = column.getKey();
String pgsqlType = column.getValue();

String avroType = convertPgsqlTypeToAvro(pgsqlType);

@@ -72,28 +70,4 @@

return schemaBuilder.toString();
}

public String primaryKey(
CreateTable statement)
{
String primaryKey = null;

final List<Index> indexes = statement.getIndexes();

if (indexes != null && !indexes.isEmpty())
{
match:
for (Index index : indexes)
{
if ("PRIMARY KEY".equalsIgnoreCase(index.getType()))
{
final List<Index.ColumnParams> primaryKeyColumns = index.getColumns();
primaryKey = primaryKeyColumns.get(0).columnName;
break match;
}
}
}

return primaryKey;
}
}
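
The new generateSchema signature above depends only on a small parser model from binding-pgsql. As a point of reference, here is a minimal sketch of what TableInfo might look like, assuming it is a simple record exposing name() and a column-name-to-type map as used in the diff; the actual model in io.aklivity.zilla.runtime.binding.pgsql.parser.model likely carries more (the removed primaryKey() helper suggests primary-key data now lives there too).

// Hypothetical sketch only; the real TableInfo produced by the ANTLR-based
// parser in binding-pgsql may differ in shape and naming.
package io.aklivity.zilla.runtime.binding.pgsql.parser.model;

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

public record TableInfo(
    String name,                  // table name, e.g. "cities"
    Map<String, String> columns,  // column name -> pgsql type, in declaration order
    Set<String> primaryKeys)      // assumed home for the primaryKey() logic removed above
{
    public TableInfo
    {
        columns = new LinkedHashMap<>(columns);  // defensive copy that keeps insertion order
        primaryKeys = Set.copyOf(primaryKeys);
    }
}
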
incubator/binding-pgsql-kafka/.../PgsqlKafkaValueAvroSchemaTemplate.java
@@ -14,11 +14,9 @@
*/
package io.aklivity.zilla.runtime.binding.pgsql.kafka.internal.schema;

import java.util.List;
import java.util.Map;

import net.sf.jsqlparser.statement.create.table.ColumnDefinition;
import net.sf.jsqlparser.statement.create.table.CreateTable;
import net.sf.jsqlparser.statement.create.table.Index;
import io.aklivity.zilla.runtime.binding.pgsql.parser.model.TableInfo;

public class PgsqlKafkaValueAvroSchemaTemplate extends PgsqlKafkaAvroSchemaTemplate
{
@@ -35,88 +33,38 @@ public PgsqlKafkaValueAvroSchemaTemplate(

public String generateSchema(
String database,
CreateTable createTable)
TableInfo createTable)
{
schemaBuilder.setLength(0);

final String newNamespace = namespace.replace(DATABASE_PLACEHOLDER, database);
final String recordName = createTable.getTable().getName();
final String recordName = createTable.name();

schemaBuilder.append("{\n");
schemaBuilder.append("\"schemaType\": \"AVRO\",\n");
schemaBuilder.append("\"schema\": \""); // Begin the schema field
schemaBuilder.append("\"schema\": \"");

// Building the actual Avro schema
schemaBuilder.append("{\\\"type\\\": \\\"record\\\",");
schemaBuilder.append(" \\\"name\\\": \\\"").append(recordName).append("\\\",");
schemaBuilder.append(" \\\"namespace\\\": \\\"").append(newNamespace).append("\\\",");
schemaBuilder.append(" \\\"fields\\\": [");

for (ColumnDefinition column : createTable.getColumnDefinitions())
for (Map.Entry<String, String> column : createTable.columns().entrySet())
{
String fieldName = column.getColumnName();
String pgsqlType = column.getColDataType().getDataType();
String fieldName = column.getKey();
String pgsqlType = column.getValue();

String avroType = convertPgsqlTypeToAvro(pgsqlType);

schemaBuilder.append(" {\\\"name\\\": \\\"").append(fieldName).append("\\\",");
schemaBuilder.append(" \\\"type\\\": ").append(avroType).append("},");
}

// Remove the last comma and close the fields array
schemaBuilder.setLength(schemaBuilder.length() - 1);
schemaBuilder.append("]");

// Closing the Avro schema
schemaBuilder.append("}\"\n}");

return schemaBuilder.toString();
}

public String primaryKey(
CreateTable statement)
{
String primaryKey = null;

final List<Index> indexes = statement.getIndexes();

if (indexes != null && !indexes.isEmpty())
{
match:
for (Index index : indexes)
{
if ("PRIMARY KEY".equalsIgnoreCase(index.getType()))
{
final List<Index.ColumnParams> primaryKeyColumns = index.getColumns();
primaryKey = primaryKeyColumns.get(0).columnName;
break match;
}
}
}

return primaryKey;
}

public int primaryKeyCount(
CreateTable statement)
{
int primaryKeyCount = 0;

final List<Index> indexes = statement.getIndexes();

if (indexes != null && !indexes.isEmpty())
{
match:
for (Index index : indexes)
{
if ("PRIMARY KEY".equalsIgnoreCase(index.getType()))
{
primaryKeyCount = index.getColumns().size();
break match;
}
}
}

return primaryKeyCount;
}
}
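
Taken together, a hedged usage sketch of driving the value template after this change: only the generateSchema(String database, TableInfo table) signature comes from the diff above, while the template's constructor argument and the TableInfo constructor follow the hypothetical record sketched earlier.

// Hypothetical usage sketch; the constructor argument and TableInfo shape are assumptions.
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;

import io.aklivity.zilla.runtime.binding.pgsql.kafka.internal.schema.PgsqlKafkaValueAvroSchemaTemplate;
import io.aklivity.zilla.runtime.binding.pgsql.parser.model.TableInfo;

public final class ValueSchemaExample
{
    public static void main(String[] args)
    {
        // Assumed namespace pattern; the real DATABASE_PLACEHOLDER token is defined
        // in PgsqlKafkaAvroSchemaTemplate and substituted inside generateSchema().
        PgsqlKafkaValueAvroSchemaTemplate template =
            new PgsqlKafkaValueAvroSchemaTemplate("{database}.public");

        // Column order matters: the Avro fields are emitted in iteration order.
        Map<String, String> columns = new LinkedHashMap<>();
        columns.put("id", "INT");
        columns.put("name", "VARCHAR");

        TableInfo table = new TableInfo("cities", columns, Set.of("id"));

        String registryPayload = template.generateSchema("dev", table);
        System.out.println(registryPayload);
    }
}
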