diff --git a/Cargo.lock b/Cargo.lock index 7426abd..6153bb1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4780,7 +4780,7 @@ dependencies = [ [[package]] name = "yozefu" -version = "0.0.1" +version = "0.0.2" dependencies = [ "tokio", "yozefu-command", @@ -4788,7 +4788,7 @@ dependencies = [ [[package]] name = "yozefu-app" -version = "0.0.1" +version = "0.0.2" dependencies = [ "async-trait", "extism", @@ -4806,7 +4806,7 @@ dependencies = [ [[package]] name = "yozefu-command" -version = "0.0.1" +version = "0.0.2" dependencies = [ "chrono", "clap", @@ -4832,7 +4832,7 @@ dependencies = [ [[package]] name = "yozefu-lib" -version = "0.0.1" +version = "0.0.2" dependencies = [ "apache-avro", "chrono", @@ -4851,7 +4851,7 @@ dependencies = [ [[package]] name = "yozefu-tui" -version = "0.0.1" +version = "0.0.2" dependencies = [ "bytesize", "chrono", @@ -4880,7 +4880,7 @@ dependencies = [ [[package]] name = "yozefu-wasm-types" -version = "0.0.1" +version = "0.0.2" dependencies = [ "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index d1cadd2..b9137ab 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -22,7 +22,7 @@ default-members = [ resolver = "2" [workspace.package] -version = "0.0.1" +version = "0.0.2" edition = "2021" authors = ["Yann Prono "] readme = "README.md" @@ -32,12 +32,12 @@ license = "Apache-2.0" [workspace.dependencies] -lib = { package = "yozefu-lib", path = "crates/lib/", version = "0.0.1" } -app = { package = "yozefu-app", path = "crates/app/", version = "0.0.1" } -command = { package = "yozefu-command", path = "crates/command/", version = "0.0.1" } -yozefu = { package = "yozefu", path = "crates/bin/", version = "0.0.1" } -tui = { package = "yozefu-tui", path = "crates/tui/", version = "0.0.1" } -wasm-types = { package = "yozefu-wasm-types", path = "crates/wasm-types/", version = "0.0.1" } +lib = { package = "yozefu-lib", path = "crates/lib/", version = "0.0.2" } +app = { package = "yozefu-app", path = "crates/app/", version = "0.0.2" } +command = { package = "yozefu-command", path = "crates/command/", version = "0.0.2" } +yozefu = { package = "yozefu", path = "crates/bin/", version = "0.0.2" } +tui = { package = "yozefu-tui", path = "crates/tui/", version = "0.0.2" } +wasm-types = { package = "yozefu-wasm-types", path = "crates/wasm-types/", version = "0.0.2" } [profile.release] opt-level = 3 @@ -51,7 +51,7 @@ incremental = false [workspace.metadata.release] shared-version = true -tag-message = "chore: Release version {{version}}" -pre-release-commit-message = "chore: Release version {{version}}" +tag-message = "chore: Release version v{{version}}" +pre-release-commit-message = "chore: Release version v{{version}}" tag-name = "{{version}}" diff --git a/README.md b/README.md index 25bb868..44b0233 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,6 @@ -Build status Minimum supported Rust version: 1.80.1 or later Licence @@ -96,7 +95,7 @@ yozf -c localhost - [The query language.](./docs/query-language/README.md) - [Creating a search filter.](./docs/search-filter/README.md) - - [Configuring the tool with TLS.](./docs/tls/README.md) + - [TLS encryption and authentication.](./docs/tls/README.md)
- [URL templates to switch to web applications.](./docs/url-templates/README.md) - [Schema registry.](./docs/schema-registry/README.md) - [Themes.](./docs/themes/README.md) diff --git a/crates/bin/Cargo.toml b/crates/bin/Cargo.toml index 34db3c4..ea1ba94 100644 --- a/crates/bin/Cargo.toml +++ b/crates/bin/Cargo.toml @@ -31,4 +31,4 @@ ssl-vendored = [ ] gssapi-vendored = [ "command/gssapi-vendored" -] \ No newline at end of file +] diff --git a/crates/command/Cargo.toml b/crates/command/Cargo.toml index 737edd5..46f8279 100644 --- a/crates/command/Cargo.toml +++ b/crates/command/Cargo.toml @@ -62,4 +62,4 @@ gssapi-vendored = [ "rdkafka/gssapi-vendored", "tui/gssapi-vendored", "app/gssapi-vendored" -] \ No newline at end of file +] diff --git a/docs/keybindings/README.md b/docs/keybindings/README.md index f0a57f4..a7bd588 100644 --- a/docs/keybindings/README.md +++ b/docs/keybindings/README.md @@ -5,13 +5,13 @@ So far, keybindings are hardcoded. -**General keybindings** +**General** | Keybinding | Description | | --------------------------------- | :------------------------------------- | -| tab | Next panel | -| shift + tab | previous panel | +| Tab | Next panel | +| Shift + Tab | Previous panel | | / | Go to search bar | -| escape | Close the view or exit the app | +| Escape | Close the last visible dialog | | ctrl + H | Show/Hide help | | ctrl + O | Show/Hide topics | | [ | Scroll to top | @@ -20,17 +20,17 @@ So far, keybindings are hardcoded. | K | Move down by one line | - +
**Topics** -| Keybinding | Description | -| ------------------------------ | :----------------- | -| tab | Next panel | -| ctrl + P | Show topic details | -| ctrl + U | Unselect topics | -| enter | Select the topic | +| Keybinding | Description | +| ------------------------------ | :------------------ | +| ctrl + P | Show topic details | +| ctrl + U | Unselect all topics | +| Enter | Select the topic | +
**Records list** | Keybinding | Description | @@ -39,10 +39,12 @@ So far, keybindings are hardcoded. | O | Open the kafka record in the web browser | | E | Export kafka record to the file | | F | Keep selecting the last consumed kafka record | -| enter | Open the selected record | +| Enter | Open the selected record | | ↑ or ↓ | Previous/next record | +
+ **Record** | Keybinding | Description | @@ -55,12 +57,19 @@ So far, keybindings are hardcoded.
+**Schemas** + +| Keybinding | Description | +| ---------------------------- | :------------------------ | +| C | Copy schemas to clipboard | +| ↑ or ↓ | Scroll | + +
+ + **Search** -| Keybinding | Description | -| ---------------------------- | :--------------------------------------- | -| ↑ or ↓ | Browse history | -| C | Copy kafka record to clipboard | -| O | Open the kafka record in the web browser | -| E | Export kafka record to the file | -| enter | Search kafka records | +| Keybinding | Description | +| ---------------------------- | :------------------- | +| ↑ or ↓ | Browse history | +| Enter | Search kafka records | diff --git a/docs/schemas/MyProducer.java b/docs/schemas/MyProducer.java index 2e4f29e..2e09ee3 100644 --- a/docs/schemas/MyProducer.java +++ b/docs/schemas/MyProducer.java @@ -170,7 +170,7 @@ public Properties kafkaProperties() { return props; } - public static void produce(KafkaProducer producer, Into mapper, List addresses, String topic) throws Exception { + public static void produce(final KafkaProducer producer, final Into mapper, final List addresses, final String topic) throws Exception { for (var address : addresses) { var record = mapper.into(address, topic); producer.send(record, onSend()); @@ -189,7 +189,7 @@ private static Callback onSend() { }; } - private static List get(String apiUrl, String query) throws IOException, InterruptedException { + private static List get(final String apiUrl, String query) throws IOException, InterruptedException { System.err.printf(" 🏡 Searching French addresses matching the query '%s'\n", query); var url = String.format(apiUrl, query.trim().toLowerCase()); @@ -229,7 +229,7 @@ public static void main(String[] args) { interface Into { - ProducerRecord into(String value, String topic) throws Exception; + ProducerRecord into(final String value, final String topic) throws Exception; default String generateKey() { return UUID.randomUUID().toString(); @@ -243,7 +243,7 @@ default String readResource(String path) throws Exception { } class IntoText implements Into { - public ProducerRecord into(String value, String topic) throws JsonProcessingException { + public ProducerRecord into(final String value, final String topic) throws JsonProcessingException { var objectMapper = new ObjectMapper(); var object = objectMapper.readTree(value); return new ProducerRecord<>(topic, this.generateKey(), object.get("properties").get("label").asText()); @@ -251,13 +251,13 @@ public ProducerRecord into(String value, String topic) throws Js } class IntoJson implements Into { - public ProducerRecord into(String value, String topic) { + public ProducerRecord into(final String value, final String topic) { return new ProducerRecord<>(topic, generateKey(), value); } } class IntoJsonSchema implements Into { - public ProducerRecord into(String input, String topic) throws Exception { + public ProducerRecord into(final String input, final String topic) throws Exception { var objectMapper = new ObjectMapper(); var keySchemaString = readResource("/json-schema/key-schema.json"); var valueSchemaString = readResource("/json-schema/value-schema.json"); @@ -275,7 +275,7 @@ public ProducerRecord into(String input, String topic) throw } class IntoAvro implements Into { - public ProducerRecord into(String input, String topic) throws Exception { + public ProducerRecord into(final String input, final String topic) throws Exception { var keySchemaString = readResource("/avro/key-schema.json"); var valueSchemaString = readResource("/avro/value-schema.json"); @@ -293,7 +293,7 @@ public ProducerRecord into(String input, String to // TODO work in progress class IntoProtobuf implements Into { - public ProducerRecord into(String input, String topic)
throws Exception { + public ProducerRecord into(final String input, final String topic) throws Exception { var keySchemaString = readResource("/protobuf/key-schema.proto"); var valueSchemaString = readResource("/protobuf/value-schema.proto"); @@ -309,7 +309,7 @@ public ProducerRecord into(String input, String topic) throws Ex } class IntoMalformed implements Into { - public ProducerRecord into(String input, String topic) throws Exception { + public ProducerRecord into(final String input, final String topic) throws Exception { byte randomSchemaId = (byte) ((Math.random() * (127 - 1)) + 1); var header = new byte[]{0, 0, 0, 0, randomSchemaId}; @@ -329,9 +329,8 @@ public ProducerRecord into(String input, String topic) throws Ex } } - class IntoInvalidJson implements Into { - public ProducerRecord into(String input, String topic) throws Exception { + public ProducerRecord into(final String input, final String topic) throws Exception { var objectMapper = new ObjectMapper(); var keySchemaString = readResource("/json-schema/key-schema.json"); var valueSchemaString = readResource("/json-schema/value-schema.json"); @@ -350,7 +349,7 @@ public ProducerRecord into(String input, String topic) throw } class IntoXml implements Into { - public ProducerRecord into(String input, String topic) throws Exception { + public ProducerRecord into(final String input, final String topic) throws Exception { var objectMapper = new ObjectMapper(); var xmlMapper = new XmlMapper(); var value = objectMapper.readTree(input); diff --git a/docs/tls/README.md b/docs/tls/README.md index bbafdc9..a386442 100644 --- a/docs/tls/README.md +++ b/docs/tls/README.md @@ -1,4 +1,4 @@ -# TLS Support +# TLS encryption and authentication

@@ -6,15 +6,12 @@

- This page helps you configure TLS settings for different providers. The steps are always the same: 1. Open the configuration with `yozf configure` 2. Edit the configuration file by adding a new cluster. 3. Save the file and start the tool `yozf -c my-cluster` -If you use any of the following properties:`ssl.ca.location`, `ssl.certificate.location`, `ssl.key.location`, make sure to provide an absolute path, using `~` in the path doesn't work. - > [!WARNING] > `SASL_SSL` security protocol is not available for `aarch64-unknown-linux-gnu` and `windows` targets. I'm facing some compilation issues. @@ -138,10 +135,13 @@ Please note that, according to [the documentation](https://github.com/confluenti [Contributions are welcomed](https://github.com/MAIF/yozefu/edit/main/docs/tls.md) to improve this page. -| Provider | Tested | Documentation | -| --------------------- | ------- | ----------------------------------------------------------------------------------------------------------------------------- | -| Google Cloud Platform | `false` | https://cloud.google.com/managed-service-for-apache-kafka/docs/quickstart#cloud-shell | -| Amazon Web Services | `false` | https://docs.aws.amazon.com/msk/latest/developerguide/produce-consume.html | -| Microsoft Azure | `false` | https://learn.microsoft.com/fr-fr/azure/event-hubs/azure-event-hubs-kafka-overview | -| DigitalOcean | `false` | https://docs.digitalocean.com/products/databases/kafka/how-to/connect/ | -| OVH | `false` | https://help.ovhcloud.com/csm/en-ie-public-cloud-databases-kafka-getting-started?id=kb_article_view&sysparm_article=KB0048944 | \ No newline at end of file +| Provider | Compatible | Documentation | +| ----------------------- | ---------- | ----------------------------------------------------------------------------------------------------------------------------- | +| Google Cloud Platform | ? | https://cloud.google.com/managed-service-for-apache-kafka/docs/quickstart#cloud-shell | +| Amazon Web Services | ? | https://docs.aws.amazon.com/msk/latest/developerguide/produce-consume.html | +| Microsoft Azure | ? | https://learn.microsoft.com/fr-fr/azure/event-hubs/azure-event-hubs-kafka-overview | +| DigitalOcean | ? | https://docs.digitalocean.com/products/databases/kafka/how-to/connect/ | +| OVH | ? | https://help.ovhcloud.com/csm/en-ie-public-cloud-databases-kafka-getting-started?id=kb_article_view&sysparm_article=KB0048944 | +| Aiven for Apache Kafka® | `true` | https://aiven.io/docs/products/kafka/howto/list-code-samples | +| Confluent Cloud | `true` | https://confluent.cloud/environments | +| Redpanda | `true` | https://cloud.redpanda.com/clusters | \ No newline at end of file
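For readers following the three configuration steps above, a client-side sketch of what "TLS encryption and authentication" means in practice may help. The snippet below mirrors the `kafkaProperties()` method of `docs/schemas/MyProducer.java`, switched to TLS. It is illustrative only and not part of this change: the broker address, store paths, passwords, and topic name are placeholders, and the property names are the Java-client counterparts of the librdkafka settings (`ssl.ca.location`, `ssl.certificate.location`, `ssl.key.location`) that yozefu reads from its cluster configuration.

```java
import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.serialization.StringSerializer;

// Illustrative TLS variant of kafkaProperties() from MyProducer.java.
// The broker address, store locations and passwords are placeholders.
class TlsProducerSketch {

    static Properties tlsKafkaProperties() {
        var props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "my-cluster.example.com:9093");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());

        // Encryption: connect over TLS and trust the broker's certificate authority.
        props.put("security.protocol", "SSL");
        props.put(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "/absolute/path/to/truststore.jks");
        props.put(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "truststore-password");

        // Authentication (mutual TLS): present a client certificate to the broker.
        // Only required when the cluster enforces client authentication.
        props.put(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "/absolute/path/to/keystore.jks");
        props.put(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "keystore-password");
        return props;
    }

    public static void main(String[] args) {
        // Sending a single record exercises both halves of the handshake.
        try (var producer = new KafkaProducer<String, String>(tlsKafkaProperties())) {
            producer.send(new ProducerRecord<>("my-topic", "key", "hello over TLS"));
        }
    }
}
```

Encryption-only endpoints need just the truststore half; the keystore half matters only when the cluster requires client certificates, as with the service certificate and key that Aiven, for example, hands out.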