package databus_apicurio_testing.examples;
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;

import io.apicurio.registry.utils.serde.AbstractKafkaSerDe;
import io.apicurio.registry.utils.serde.AbstractKafkaSerializer;
import io.apicurio.registry.utils.serde.JsonSchemaKafkaDeserializer;
import io.apicurio.registry.utils.serde.JsonSchemaKafkaSerializer;
import io.apicurio.registry.utils.serde.JsonSchemaSerDeConstants;


/**
 * This example demonstrates how to use Apicurio Registry in a very simple publish/subscribe
 * scenario with JSON as the serialization format (and JSON Schema for validation).  Because JSON
 * Schema is used only for validation (not for the actual serialization), validation can be enabled
 * or disabled without affecting how the serializer and deserializer work.  However, if validation
 * is disabled, invalid data can be produced and consumed without any error being reported.
 *
 * The following aspects are demonstrated:
 *
 * <ol>
 *   <li>Configuring a Kafka Serializer for use with Apicurio Registry</li>
 *   <li>Configuring a Kafka Deserializer for use with Apicurio Registry</li>
 *   <li>Data sent as a CloudEventBean</li>
 *   <li>Data consumed as a CloudEventBean</li>
 * </ol>
 *
 * Pre-requisites:
 *
 * <ul>
 *   <li>A Kafka cluster reachable over SSL at the bootstrap server passed as the first argument</li>
 *   <li>An Apicurio Registry instance reachable at the registry URL passed as the second argument</li>
 * </ul>
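 *
 * Usage (mirrors the argument check at the start of {@code main}):
 *
 * <pre>
 * CloudEventJsonSchemaExample2 &lt;bootstrap_server&gt; &lt;registry_url&gt; &lt;topic_name&gt; &lt;truststore_location&gt; &lt;truststore_password&gt;
 * </pre>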
 */
public class CloudEventJsonSchemaExample2 {

    private static final String SUBJECT_NAME = "SampleInventoryEvent";
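
    // NOTE: CloudEventBean, InventoryUpdateRequest, and EntitySpecification are assumed to be plain POJO
    // classes elsewhere in this package (with getters/setters matching the calls in main() and mappable to
    // JSON by the serializer); they are not defined in this file.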
    			
    public static void main(String[] args) throws Exception {

        if (args.length < 5) {
            System.out.println("Usage: CloudEventJsonSchemaExample2 <bootstrap_server> <registry_url> <topic_name> <truststore_location> <truststore_password>");
            return;
        }

        System.out.println("Starting example " + CloudEventJsonSchemaExample2.class.getSimpleName());
        String bootstrapServers = args[0];
        String registryUrl = args[1];
        String topicName = args[2];
        String truststoreLocation = args[3];
        String truststorePassword = args[4];
        String subjectName = SUBJECT_NAME;

        // Create the producer.
        Producer<String, CloudEventBean> producer = createKafkaProducer(bootstrapServers, registryUrl, topicName, truststoreLocation, truststorePassword);
        
        // Produce 5 messages.
        int producedMessages = 0;
        try {
            System.out.println("Producing (5) messages.");
            for (int idx = 0; idx < 5; idx++) {
                // Create the message to send
                EntitySpecification entitySpecification = new EntitySpecification();
                entitySpecification.setId("6");
                entitySpecification.setHref("http://server:port/inventory/entitySpecification/6");

                InventoryUpdateRequest inventoryUpdateRequest = new InventoryUpdateRequest();
                inventoryUpdateRequest.setId("234");
                inventoryUpdateRequest.setHref("http://server:port/inventory/resource/234");
                inventoryUpdateRequest.setName("Element5");
                inventoryUpdateRequest.setType("ManagedElement");
                inventoryUpdateRequest.setBaseType("PhysicalResource");
                inventoryUpdateRequest.setEntitySpecification(entitySpecification);

                // Wrap the request in a CloudEvent-style envelope
                CloudEventBean message = new CloudEventBean();
                message.setSpecversion("1.0");
                message.setType("com.example.someevent");
                message.setDataschema("http://localhost:8080/api/artifacts/CloudEventSchema1/versions/1");
                message.setSource("/Inventory/inventoryupdate/request/");
                message.setSubject("InventoryUpdate");
                message.setId("1.0");
                message.setTime(String.valueOf(System.currentTimeMillis()));
                message.setDatacontenttype("application/json");
                message.setData(inventoryUpdateRequest);

                // Send/produce the message on the Kafka Producer
                ProducerRecord<String, CloudEventBean> producedRecord = new ProducerRecord<>(topicName, subjectName, message);
                System.out.println("ProducerRecord = " + producedRecord);

                producer.send(producedRecord, (metadata, e) -> {
                    if (e != null) {
                        e.printStackTrace();
                    } else {
                        System.out.println("The offset of the record we just sent is: " + metadata.offset());
                    }
                });

                System.out.println("Produced message with data: " + message.getData());
                Thread.sleep(100);
            }
            System.out.println("Messages successfully produced.");
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            System.out.println("Closing the producer.");
            producer.flush();
            producer.close();
        }

        // Create the consumer
        System.out.println("Creating the consumer.");
        KafkaConsumer<String, CloudEventBean> consumer = createKafkaConsumer(bootstrapServers, registryUrl, topicName, truststoreLocation, truststorePassword);

        // Subscribe to the topic
        System.out.println("Subscribing to topic " + topicName);
        consumer.subscribe(Collections.singletonList(topicName));
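        // subscribe() only registers interest in the topic; partitions are assigned and records are fetched
        // by the poll() calls below.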

        // Consume the 5 messages.
        try {
            int messageCount = 0;
            System.out.println("Consuming (5) messages.");
            while (messageCount < 5) {
                final ConsumerRecords<String, CloudEventBean> records = consumer.poll(Duration.ofSeconds(1));
                messageCount += records.count();
                if (records.count() == 0) {
                    // Do nothing - no messages waiting.
                    System.out.println("No messages waiting...");
                } else records.forEach(record -> {
                    CloudEventBean msg = record.value();
                    System.out.println("Consumed a message: " + msg.getSource());
                });
            }
        } finally {
            consumer.close();
        }

        System.out.println("Done (success).");
    }

    /**
     * Creates the Kafka producer.
     */
    private static Producer<String, CloudEventBean> createKafkaProducer(String servers, String registryUrl, String topicName, String truststoreLocation, String truststorePassword) {
        
        Properties props = new Properties();

        // Configure the following three settings for SSL Encryption
        props.putIfAbsent(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
        props.putIfAbsent(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreLocation);
        props.putIfAbsent(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, truststorePassword);

        // Configure Kafka settings
        props.putIfAbsent(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.putIfAbsent(ProducerConfig.CLIENT_ID_CONFIG, "InventoryProducer-" + topicName);
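        // acks=all: the broker acknowledges a record only once the full set of in-sync replicas has written it.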
        props.putIfAbsent(ProducerConfig.ACKS_CONFIG, "all");
        props.putIfAbsent(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        
        // Use the Apicurio Registry provided Kafka Serializer for JSON Schema
        props.putIfAbsent(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSchemaKafkaSerializer.class.getName());
        
        // Configure Apicurio Registry location
        props.putIfAbsent(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, registryUrl);
        
        // Map the topic name directly to the artifactId in the registry (SimpleTopicIdStrategy does not append -key/-value)
        props.putIfAbsent(AbstractKafkaSerializer.REGISTRY_ARTIFACT_ID_STRATEGY_CONFIG_PARAM,
        		io.apicurio.registry.utils.serde.strategy.SimpleTopicIdStrategy.class.getName());

        // Get an existing schema or auto-register if not found
        props.putIfAbsent(AbstractKafkaSerializer.REGISTRY_GLOBAL_ID_STRATEGY_CONFIG_PARAM,
            io.apicurio.registry.utils.serde.strategy.GetOrCreateIdStrategy.class.getName());
        
        // Validate each outgoing payload against the registered JSON Schema before sending; if this is
        // disabled, invalid payloads pass through without error (see the class Javadoc).
        props.putIfAbsent(JsonSchemaSerDeConstants.REGISTRY_JSON_SCHEMA_VALIDATION_ENABLED, "true");

        // Create the Kafka producer
        return new KafkaProducer<>(props);
    }

    /**
     * Creates the Kafka consumer.
     */
    private static KafkaConsumer<String, CloudEventBean> createKafkaConsumer(String servers, String registryUrl, String topicName, String truststoreLocation, String truststorePassword) {
        Properties props = new Properties();

        // Configure the following three settings for SSL Encryption
        props.putIfAbsent(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL");
        props.putIfAbsent(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, truststoreLocation);
        props.putIfAbsent(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, truststorePassword);

        // Configure Kafka
        props.putIfAbsent(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.putIfAbsent(ConsumerConfig.GROUP_ID_CONFIG, "InventoryConsumer-" + topicName);
        props.putIfAbsent(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true");
        props.putIfAbsent(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
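        // auto.offset.reset=earliest: a brand-new consumer group starts from the beginning of the topic, so
        // the records produced above are picked up even though they were sent before this consumer subscribed.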
        props.putIfAbsent(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.putIfAbsent(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        // Use the Apicurio Registry provided Kafka Deserializer for JSON Schema
        props.putIfAbsent(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, JsonSchemaKafkaDeserializer.class.getName());
        
        // Configure Apicurio Registry location
        props.putIfAbsent(AbstractKafkaSerDe.REGISTRY_URL_CONFIG_PARAM, registryUrl);
        
        // Validate each consumed payload against the registered JSON Schema during deserialization.
        props.putIfAbsent(JsonSchemaSerDeConstants.REGISTRY_JSON_SCHEMA_VALIDATION_ENABLED, "true");

        // Create the Kafka Consumer
        return new KafkaConsumer<>(props);
    }
}
 
