I am trying to implement pagination in DynamoDB using Java in my project. I have implemented it, but I am struggling with how to get the number of elements in the result per page using DynamoDBScanExpression. Can anyone help? Thanks.
Here is my code
package com.morrisons.extendedrange.dao;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClient;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig.TableNameOverride;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBQueryExpression;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBScanExpression;
import com.amazonaws.services.dynamodbv2.datamodeling.PaginatedScanList;
import com.amazonaws.services.dynamodbv2.datamodeling.QueryResultPage;
import com.amazonaws.services.dynamodbv2.datamodeling.ScanResultPage;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
import com.amazonaws.services.dynamodbv2.model.Condition;
import com.amazonaws.services.dynamodbv2.model.ReturnConsumedCapacity;
import com.google.inject.Inject;
import com.morrisons.extendedrange.config.ExtendedRangeServiceConfiguration;
import com.morrisons.extendedrange.entity.ExtendedRangeEntity;
import com.morrisons.extendedrange.exception.ErrorCodes;
import com.morrisons.extendedrange.exception.ExtendedRangeServiceException;
import com.morrisons.extendedrange.model.ExtendedRange;
import com.morrisons.extendedrange.util.Logger;
import com.morrisons.extendedrange.util.LoggerFactory;
/**
* The Class ExtendedRangeDao.
*/
public class ExtendedRangeDao {
private static Logger LOGGER;
@Inject
private LoggerFactory logFactory;
Map<String, AttributeValue> lastEvaluatedKey = null;
@Inject
private void init() {
LOGGER = logFactory.getLogger(ExtendedRangeDao.class);
}
/** The range service configuration. */
@Inject
private ExtendedRangeServiceConfiguration extendedRangeServiceConfiguration;
/**
* Gets the extended range.
*
* @param catalogId
*            the catalog id
* @param productId
*            the product id
* @param page
*            the page
* @return the extended range
*/
public ExtendedRange getExtendedRange(String catalogId, String productId, String page) {
LOGGER.debug("ExtendedRangeDao : getExtendedRange start ");
List<ExtendedRange> extendedRangeList = new ArrayList<ExtendedRange>();
try {
AmazonDynamoDBClient client = extendedRangeServiceConfiguration.getDynamoDBClient();
DynamoDBMapper mapper = new DynamoDBMapper(client);
Map<String, AttributeValue> eav = new HashMap<String, AttributeValue>();
eav.put(":val1", new AttributeValue().withS(productId));
eav.put(":val2", new AttributeValue().withS(catalogId));
DynamoDBScanExpression scanExpression=new DynamoDBScanExpression()
.withFilterExpression("(productname =:val1 or productid =:val1) and catalogid = :val2 ")
.withExpressionAttributeValues(eav)
.withConsistentRead(true)
.withLimit(5)
.withExclusiveStartKey(lastEvaluatedKey) ;
ScanResultPage<ExtendedRangeEntity> queryResultPage = mapper.scanPage(ExtendedRangeEntity.class,
scanExpression, getDBMapperConfigForFetch());
List<ExtendedRangeEntity> extendedRangeEntityList = queryResultPage.getResults();
lastEvaluatedKey = queryResultPage.getLastEvaluatedKey();
if (extendedRangeEntityList.isEmpty()) {
String errorMessage = "No Record found or Multiple data found";
LOGGER.debug(errorMessage);
throw new ExtendedRangeServiceException(ErrorCodes.NO_RECORDS_FOUND, "ExtendedRangeEntity not found");
} else {
for (ExtendedRangeEntity extendedRangeEntity : extendedRangeEntityList) {
ExtendedRange extendedRange = new ExtendedRange();
copyProperties(extendedRange, extendedRangeEntity);
extendedRangeList.add(extendedRange);
LOGGER.debug("ExtendedRangeDao : getExtendedRange end ");
}
ExtendedRange returnExtendedRange = new ExtendedRange();
returnExtendedRange.setExtendedRangeList(extendedRangeList);
return returnExtendedRange;
}
} catch (AmazonServiceException e) {
String errorMessage = "Error in retrieving Data in DynamoDB";
LOGGER.error(errorMessage, e);
throw new ExtendedRangeServiceException(ErrorCodes.AMAZON_SERVICE_ERROR, errorMessage);
}
}
private DynamoDBMapperConfig getDBMapperConfigForFetch() {
String tableName = extendedRangeServiceConfiguration.getTableNameConfig()
.getTableName(ExtendedRangeEntity.class);
TableNameOverride tableNameOverride = new TableNameOverride(tableName);
DynamoDBMapperConfig dbMapperConfig = new DynamoDBMapperConfig(tableNameOverride);
return dbMapperConfig;
}
public ExtendedRangeServiceConfiguration getExtendedRangeServiceConfiguration() {
return extendedRangeServiceConfiguration;
}
public void setRangeServiceConfiguration(ExtendedRangeServiceConfiguration extendedRangeServiceConfiguration) {
this.extendedRangeServiceConfiguration = extendedRangeServiceConfiguration;
}
private void copyProperties(ExtendedRange target, ExtendedRangeEntity src) {
target.setProductId(src.getProductId());
target.setLeadTimeUOM(src.getLeadTimeUOM());
target.setCatalogId(src.getCatalogId());
target.setProductDesc(src.getProductDesc());
target.setProductName(src.getProductName());
target.setLeadTime(src.getLeadTime());
target.setCanBeOrderedFromDate(src.getCanBeOrderedFromDate());
target.setCanBeOrderedToDate(src.getCanBeOrderedToDate());
}
}
The size() method should provide the number of elements in a page.
queryResultPage.getResults().size()
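For example, a minimal paging loop along these lines reports how many items came back in each page (a sketch based on the same mapper calls used in the question; variable names are illustrative):
Map<String, AttributeValue> startKey = null;
int pageNo = 0;
do {
    DynamoDBScanExpression scanExpression = new DynamoDBScanExpression()
            .withFilterExpression("(productname = :val1 or productid = :val1) and catalogid = :val2")
            .withExpressionAttributeValues(eav)
            .withLimit(5)                      // ask for at most 5 items per page
            .withExclusiveStartKey(startKey);  // null on the first page
    ScanResultPage<ExtendedRangeEntity> page =
            mapper.scanPage(ExtendedRangeEntity.class, scanExpression);
    pageNo++;
    // getResults().size() is the number of elements returned in this page
    System.out.println("Page " + pageNo + " contains " + page.getResults().size() + " items");
    startKey = page.getLastEvaluatedKey();     // null when there are no more pages
} while (startKey != null);
Note that DynamoDB applies Limit before the filter expression, so a page can contain fewer than 5 matching items even when more data remains.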
I'm using spring-boot 2.7.4 and spring-cloud-dependencies 2021.0.4.
I haven't found anything in the Spring documentation about adding trusted types to the BatchMessagingMessageConverter. I'm using Kafka to read messages in batch mode. If I insert a custom header (my own class), then when the consumer reads the header it gets back a DefaultKafkaHeaderMapper$NonTrustedHeaderType and not my class.
I have this key in my configuration to activate batch mode:
spring.cloud.stream.bindings.nameBind-in-0.consumer.batch-mode=true
While debugging, I tried adding my class's package to the headerMapper in the BatchMessagingMessageConverter and everything worked fine. Is there a way to specify my package in the configuration?
I followed the documentation at https://docs.spring.io/spring-cloud-stream/docs/3.2.5/reference/html/spring-cloud-stream-binder-kafka.html#kafka-binder-properties and created a bean like this:
#Bean("kafkaHeaderMapperCustom")
KafkaHeaderMapper getKafkaHeaderMapperCustom() {
var defKHM = new DefaultKafkaHeaderMapper();
defKHM.addTrustedPackages("*");
return defKHM;
}
I specified it with the spring.cloud.stream.kafka.binder.headerMapperBeanName key in the configuration, but it doesn't work. I suppose that configuration only applies in the non-batch context?
I also tried these properties:
spring.kafka.consumer.properties.spring.json.trusted.packages
spring.json.trusted.packages
EDIT - Add example:
import org.springframework.boot.ApplicationRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.stream.function.StreamBridge;
import org.springframework.context.annotation.Bean;
import org.springframework.kafka.support.DefaultKafkaHeaderMapper;
import org.springframework.kafka.support.KafkaHeaderMapper;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.support.MessageBuilder;
import java.time.LocalDate;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
@SpringBootApplication
public class Application {
public static final String HEADER_KEY = "CUSTOM_HEADER_KEY";
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
@Bean
public ApplicationRunner runner(StreamBridge streamBridge) {
return args -> {
var headers = new MessageHeaders(Map.of(HEADER_KEY, new CustomHeaderClass("field1Value", LocalDate.now())));
headers.get(KafkaHeaders.BATCH_CONVERTED_HEADERS);
var message = MessageBuilder.createMessage(new ExampleBrokenHeaderEntity("randomValue", "randomName"), headers);
streamBridge.send("stackoverflow-example", message);
};
}
@Bean
public Consumer<Message<List<ExampleBrokenHeaderEntity>>> readFromKafkaBatchMode() {
return messages -> {
var brokenHeader = ((ArrayList<Map<String, Object>>) messages.getHeaders().get(KafkaHeaders.BATCH_CONVERTED_HEADERS)).get(0).get(HEADER_KEY);
System.out.println("BATCH - Class header: " + (brokenHeader != null ? brokenHeader.getClass() : null));
};
}
@Bean
public Consumer<Message<ExampleBrokenHeaderEntity>> readFromKafkaNoBatchMode() {
return messages -> {
var brokenHeader = messages.getHeaders().get(HEADER_KEY);
System.out.println("NO_BATCH - Class header: " + (brokenHeader != null ? brokenHeader.getClass() : null));
};
}
#Bean("kafkaHeaderMapperCustom")
public KafkaHeaderMapper getKafkaHeaderMapperBatchMode() {
var kafkaHeaderMapperCustom = new DefaultKafkaHeaderMapper();
kafkaHeaderMapperCustom.addTrustedPackages("*");
return kafkaHeaderMapperCustom;
}
}
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;
import java.time.LocalDate;
@Data
@NoArgsConstructor
@AllArgsConstructor
public class CustomHeaderClass {
private String field1;
private LocalDate field2;
}
import lombok.AllArgsConstructor;
import lombok.Data;
@Data
@AllArgsConstructor
public final class ExampleBrokenHeaderEntity {
private String type;
private String name;
}
spring.cloud.stream.kafka.binder.brokers=x.x.x.x:xxxx
spring.cloud.function.definition=readFromKafkaNoBatchMode;readFromKafkaBatchMode
spring.cloud.stream.bindings.readFromKafkaBatchMode-in-0.destination=stackoverflow-example
spring.cloud.stream.bindings.readFromKafkaBatchMode-in-0.group=readFromKafkaBatchMode
spring.cloud.stream.bindings.readFromKafkaBatchMode-in-0.consumer.batch-mode=true
spring.cloud.stream.bindings.readFromKafkaNoBatchMode-in-0.destination=stackoverflow-example
spring.cloud.stream.bindings.readFromKafkaNoBatchMode-in-0.group=readFromKafkaNoBatchMode
spring.cloud.stream.kafka.binder.headerMapperBeanName=kafkaHeaderMapperCustom
The output of the example is:
NO_BATCH - Class header: class com.example.kafka.header.types.CustomHeaderClass
BATCH - Class header: class org.springframework.kafka.support.DefaultKafkaHeaderMapper$NonTrustedHeaderType
It's a bug; the binder only sets the custom header mapper on a record converter:
private MessageConverter getMessageConverter(
final ExtendedConsumerProperties<KafkaConsumerProperties> extendedConsumerProperties) {
MessageConverter messageConverter = BindingUtils.getConsumerMessageConverter(getApplicationContext(),
extendedConsumerProperties, this.configurationProperties);
if (messageConverter instanceof MessagingMessageConverter) {
((MessagingMessageConverter) messageConverter).setHeaderMapper(getHeaderMapper(extendedConsumerProperties));
}
return messageConverter;
}
There should be similar code for when the converter is a BatchMessagingMessageConverter.
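The missing branch might look roughly like this (a hypothetical sketch, not the actual binder code):
if (messageConverter instanceof MessagingMessageConverter) {
    ((MessagingMessageConverter) messageConverter)
            .setHeaderMapper(getHeaderMapper(extendedConsumerProperties));
}
else if (messageConverter instanceof BatchMessagingMessageConverter) {
    // missing today: give the batch converter the same custom header mapper
    ((BatchMessagingMessageConverter) messageConverter)
            .setHeaderMapper(getHeaderMapper(extendedConsumerProperties));
}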
The workaround is to define a custom message converter for the batch consumer:
#Bean("batchConverter")
BatchMessageConverter batchConverter(KafkaHeaderMapper kafkaHeaderMapperCustom) {
BatchMessagingMessageConverter batchConv = new BatchMessagingMessageConverter();
batchConv.setHeaderMapper(kafkaHeaderMapperCustom);
return batchConv;
}
spring.cloud.stream.kafka.bindings.readFromKafkaBatchMode-in-0.consumer.converter-bean-name=batchConverter
NO_BATCH - Class header: class com.example.demo.So74294156Application$CustomHeaderClass
BATCH - Class header: class com.example.demo.So74294156Application$CustomHeaderClass
Please open an issue against Spring Cloud Stream, referencing this question/answer.
I have a simple login servlet which checks the email and password to see whether the user is registered. Now I'm trying to build a JavaFX app which does the same job by reading that information from the servlet. My servlet works perfectly, but I don't know how to interact with it from JavaFX. How can I send a response from the servlet to JavaFX? I've searched a lot but couldn't find anything.
Here's my servlet:
import java.io.*;
import java.security.Principal;
import java.util.Date;
import java.util.HashMap;
import java.util.Map;
import jakarta.servlet.RequestDispatcher;
import jakarta.servlet.ServletConfig;
import jakarta.servlet.ServletContext;
import jakarta.servlet.ServletException;
import jakarta.servlet.annotation.WebServlet;
import jakarta.servlet.http.HttpServlet;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import jakarta.servlet.http.HttpSession;
@WebServlet("/HelloServlet")
public class HelloServlet extends HttpServlet {
//int attempts = 3;
Date date;
/**
*
*/
private static final long serialVersionUID = -5498866193863633001L;
/**
* HashMap to store all users credentials
*/
private final Map<String, String> credentialsPairs = new HashMap<>();
@Override
public void init(ServletConfig config) throws ServletException {
String delimiter = ",";
String line = "";
/**
* The credentials file will be in the WEB-INF directory as it provides secured
* access only.
*/
String credentialFile = "/WEB-INF/accounts.txt";
/**
* Read the file and prepare a Map with the email as key and password as value. We
* have put this code in the init method as it is called only once, which avoids
* the overhead of re-reading the file for each request
*/
InputStream is = null;
InputStreamReader isr = null;
BufferedReader br = null;
ServletContext context = config.getServletContext();
try {
/**
* Open stream of file
*/
is = context.getResourceAsStream(credentialFile);
if (is != null) {
/**
* Read the file line by line and store email as a key and password as value
*/
isr = new InputStreamReader(is);
br = new BufferedReader(isr);
while ((line = br.readLine()) != null) {
String[] credentials = line.split(delimiter);
// credentials[0] is email and credentials[1] is password
credentialsPairs.put(credentials[0], credentials[1]);
}
}
} catch (Exception e) {
e.printStackTrace();
} finally {
try {
if (br != null) {
br.close();
}
if (isr != null) {
isr.close();
}
if (is != null) {
is.close();
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
public void service(HttpServletRequest request, HttpServletResponse response) throws
IOException, ServletException {
/**
* Get user entered credentials
*/
String userEmail = request.getParameter("email");
String userPassword = request.getParameter("password");
PrintWriter out = response.getWriter();
boolean isValidUser = false;
/**
* Get value from Map for user entered email address.
*/
String password = credentialsPairs.get(userEmail);
/**
* If User with entered email address found then we will get password for that
* user
*/
if (password != null) {
/**
* Compare password entered by user with one that is retrieved from file
*/
if (password.equals(userPassword)) {
isValidUser = true;
}
}
HttpSession session = request.getSession();
if (isValidUser) {
//HttpSession session = request.getSession();
session.setAttribute("email", userEmail);
//request.getRequestDispatcher("welcome.jsp").include(request, response);
response.sendRedirect("welcome.jsp");
//response.setContentType("text/html");
//response.sendError(HttpServletResponse.SC_FOUND,"Hello");
}
else {
int loginAttempt;
if (session.getAttribute("loginCount") == null)
{
session.setAttribute("loginCount", 0);
loginAttempt = 0;
}
else
{
loginAttempt = (Integer) session.getAttribute("loginCount");
}
//this is 3 attempt counting from 0,1,2
if (loginAttempt >= 2 )
{
long lastAccessedTime = session.getLastAccessedTime();
date = new Date();
long currentTime = date.getTime();
long timeDiff = currentTime - lastAccessedTime;
// 20 minutes in milliseconds
if (timeDiff >= 1200000)
{
//invalidate user session, so they can try again
session.invalidate();
}
else
{
// Error message
session.setAttribute("message","You have exceeded the 3 failed login
attempt. Please try loggin in in 20 minutes.");
//request.getRequestDispatcher("fail.jsp");
out.println("You have exceeded the 3 failed login attempt. Please
try loggin in in 20 minutes.");
}
}
else
{
loginAttempt++;
int allowLogin = 3-loginAttempt;
session.setAttribute("message","loginAttempt= "+loginAttempt+". Invalid
username or password. You have "+allowLogin+" attempts remaining. Please try again!");
out.println("Invalid email or password , please try again");
final String message = "The requested page not found";
response.sendError(HttpServletResponse.SC_NOT_FOUND,message);
}
session.setAttribute("loginCount",loginAttempt);
}
}
public void destroy() {
/**
* Free up the map
*/
credentialsPairs.clear();
}
}
Here's my JavaFX code :
import javafx.application.Application;
import javafx.fxml.FXMLLoader;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Label;
import javafx.scene.control.TextField;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
public class HelloApplication extends Application {
TextField tName;
TextField tPassword;
ComboBox comboBox;
Button login;
VBox vBox;
BorderPane borderPane;
URL url = null;
HttpURLConnection con = null;
int flag =0;
@Override
public void start(Stage stage) throws IOException {
stage.setTitle("login Page");
stage.setResizable(false);
setDesign();
comboBox.setOnAction(e->{
if(comboBox.getSelectionModel().isSelected(0)){
flag =0;
}else if (comboBox.getSelectionModel().isSelected(1)){
flag =1;
}
});
login.setOnAction(e->{
if(flag==0){
try {
System.out.println("get");
String name =tName.getText();
String password =tPassword.getText();
url = new URL("http://localhost:8080/try8_war_exploded/login?email=" + name + "&password=" + password);
con = (HttpURLConnection)url.openConnection();
con.setRequestMethod("GET");
con.setRequestProperty("Content-Type","text/html");
con.setConnectTimeout(5000);
con.setReadTimeout(5000);
} catch(Exception c) {
c.printStackTrace();
}
}
else {
}
int status = 0;
StringBuffer data = new StringBuffer();
try {
status = con.getResponseCode();
if(status > 299) {
System.out.println("error "+status);
System.out.println(con.getErrorStream());
return;
}
BufferedReader in = new BufferedReader(new
InputStreamReader(con.getInputStream()));
String str = null;
while((str=in.readLine()) != null) data.append(str);
System.out.println("here");
System.out.println(data); // print the accumulated response body
in.close();
con.disconnect();
} catch(Exception c) {
c.printStackTrace();
}
});
Scene scene =new Scene(borderPane,350,180);
stage.setScene(scene);
stage.show();
}
public void setDesign(){
borderPane = new BorderPane();
borderPane.setPadding(new Insets(20,20,20,20));
vBox = new VBox(10);
vBox.setAlignment(Pos.CENTER);
HBox hBox1 =new HBox(5);
hBox1.setAlignment(Pos.CENTER);
HBox hBox2 =new HBox(5);
hBox2.setAlignment(Pos.CENTER);
Label label1= new Label("E-mail");
tName = new TextField();
tName.setMaxWidth(150);
hBox1.getChildren().addAll(label1,tName);
Label label2= new Label("Password");
tPassword = new TextField();
tPassword.setMaxWidth(150);
hBox2.getChildren().addAll(label2,tPassword);
comboBox = new ComboBox();
comboBox.getItems().add("Get");
comboBox.getItems().add("Post");
comboBox.getSelectionModel().selectFirst();
login = new Button("Login");
vBox.getChildren().addAll(hBox1,hBox2,comboBox,login);
borderPane.setTop(vBox);
}
public static void main(String[] args) {
launch();
}
}
I'm posting the entire code so that everything is clear.
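For what it's worth, one common approach is to have the servlet write a small plain-text (or JSON) marker into the response body instead of redirecting to a JSP, and have the JavaFX client branch on the status code and body. A minimal sketch under that assumption (the SUCCESS/FAILURE markers and the 401 status are illustrative, not part of the original code):
// Servlet side: replace response.sendRedirect("welcome.jsp") with a machine-readable reply
response.setContentType("text/plain");
if (isValidUser) {
    out.println("SUCCESS");
} else {
    response.setStatus(HttpServletResponse.SC_UNAUTHORIZED); // 401 tells the client the login failed
    out.println("FAILURE");
}

// JavaFX side: read the body after con.getResponseCode() and react accordingly
int status = con.getResponseCode();
BufferedReader in = new BufferedReader(new InputStreamReader(
        status < 400 ? con.getInputStream() : con.getErrorStream()));
String body = in.readLine();
in.close();
if (status == 200 && "SUCCESS".equals(body)) {
    System.out.println("Login OK");      // e.g. load the welcome scene here
} else {
    System.out.println("Login failed");  // e.g. show an error label instead
}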
We need an example of how to test ReactiveKafkaConsumerTemplate and ReactiveKafkaProducerTemplate with an embedded Kafka broker. Thanks.
CORRECT CODE IS HERE AFTER DISCUSSION
You can write your own custom serializer/deserializer accordingly to use with a custom ReactiveKafkaProducerTemplate or ReactiveKafkaConsumerTemplate.
Custom serializer:
import org.apache.kafka.common.serialization.Serializer;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
public class EmployeeSerializer implements Serializer<Employee> {
@Override
public byte[] serialize(String topic, Employee data) {
byte[] rb = null;
ObjectMapper mapper = new ObjectMapper();
try {
rb = mapper.writeValueAsString(data).getBytes();
} catch (JsonProcessingException e) {
e.printStackTrace();
}
return rb;
}
}
Use it as part of the embedded-kafka reactive test:
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.connect.json.JsonSerializer;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.kafka.core.reactive.ReactiveKafkaProducerTemplate;
import org.springframework.kafka.support.converter.MessagingMessageConverter;
import org.springframework.kafka.test.condition.EmbeddedKafkaCondition;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import reactor.kafka.sender.SenderOptions;
import reactor.kafka.sender.SenderRecord;
import reactor.test.StepVerifier;
@EmbeddedKafka(topics = EmbeddedKafkareactiveTest.REACTIVE_INT_KEY_TOPIC,
brokerProperties = { "transaction.state.log.replication.factor=1", "transaction.state.log.min.isr=1" })
public class EmbeddedKafkareactiveTest {
public static final String REACTIVE_INT_KEY_TOPIC = "reactive_int_key_topic";
private static final Integer DEFAULT_KEY = 1;
private static final String DEFAULT_VERIFY_TIMEOUT = null;
private ReactiveKafkaProducerTemplate<Integer, Employee> reactiveKafkaProducerTemplate;
@BeforeEach
public void setUp() {
reactiveKafkaProducerTemplate = new ReactiveKafkaProducerTemplate<>(setupSenderOptionsWithDefaultTopic(),
new MessagingMessageConverter());
}
private SenderOptions<Integer, Employee> setupSenderOptionsWithDefaultTopic() {
Map<String, Object> senderProps = KafkaTestUtils
.producerProps(EmbeddedKafkaCondition.getBroker().getBrokersAsString());
SenderOptions<Integer, Employee> senderOptions = SenderOptions.create(senderProps);
senderOptions = senderOptions.producerProperty(ProducerConfig.TRANSACTIONAL_ID_CONFIG, "reactive.transaction")
.producerProperty(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true)
.producerProperty(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class.getName())
;
return senderOptions;
}
@Test
public void test_When_Publish() {
Employee employee = new Employee();
ProducerRecord<Integer, Employee> producerRecord = new ProducerRecord<Integer, Employee>(REACTIVE_INT_KEY_TOPIC, DEFAULT_KEY, employee);
StepVerifier.create(reactiveKafkaProducerTemplate.send(producerRecord)
.then())
.expectComplete()
.verify();
}
@AfterEach
public void tearDown() {
reactiveKafkaProducerTemplate.close();
}
}
The tests in the framework use an embedded kafka broker.
https://github.com/spring-projects/spring-kafka/tree/main/spring-kafka/src/test/java/org/springframework/kafka/core/reactive
@EmbeddedKafka(topics = ReactiveKafkaProducerTemplateIntegrationTests.REACTIVE_INT_KEY_TOPIC, partitions = 2)
public class ReactiveKafkaProducerTemplateIntegrationTests {
...
I added the correct serializer with a non-transactional producer. Please see the code at the top of this page for the answer.
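For the consumer side, a ReactiveKafkaConsumerTemplate can be exercised against the same embedded broker in a similar way. The following is only an illustrative sketch that reuses the setup from the test above (the EmployeeDeserializer and the group name are made up, and it assumes a record has already been published to the topic):
// build consumer properties against the embedded broker
Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(
        EmbeddedKafkaCondition.getBroker().getBrokersAsString(), "reactive-test-group", "true");
consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, EmployeeDeserializer.class);
ReceiverOptions<Integer, Employee> receiverOptions =
        ReceiverOptions.<Integer, Employee>create(consumerProps)
                .subscription(Collections.singletonList(REACTIVE_INT_KEY_TOPIC));
ReactiveKafkaConsumerTemplate<Integer, Employee> reactiveKafkaConsumerTemplate =
        new ReactiveKafkaConsumerTemplate<>(receiverOptions);
// read one record and verify the flux completes after take(1)
StepVerifier.create(reactiveKafkaConsumerTemplate.receiveAutoAck()
                .map(ConsumerRecord::value)
                .take(1))
        .expectNextCount(1)
        .verifyComplete();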
I have a simple MyLibraryApplication which has code to invoke the POST (TransactionControllerImpl.issueBookToMember) and PATCH (TransactionControllerImpl.returnBookTransaction) methods. I have referred to some links on the net and tried my best to write code to invoke the PATCH method. The code can be found in TransactionControllerTest (the testBookReturnUsingRestTemplate and testBookReturnUsingMockMvc methods). The code for invoking POST works fine, but the code for invoking PATCH does not. Control never reaches returnBookTransaction inside TransactionControllerImpl.
Error: Invalid PATCH method.
I am looking for code snippets for the TransactionControllerTest.testBookReturnUsingRestTemplate and testBookReturnUsingMockMvc methods. Can someone help me get this code into proper shape?
package com.mycompany.techtrial;
import java.util.Map;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import com.mycompany.techtrial.Transaction;
public interface TransactionController {
public ResponseEntity<Transaction> issueBookToMember(@RequestBody Map<String, String> params);
public ResponseEntity<Transaction> returnBookTransaction(@PathVariable(name="transaction-id") Long transactionId);
}
/**
*
*/
package com.mycompany.techtrial;
import java.time.LocalDateTime;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Optional;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PatchMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
@RestController
public class TransactionControllerImpl implements TransactionController{
/*
* PLEASE DO NOT CHANGE SIGNATURE OR METHOD TYPE OF END POINTS
* Example Post Request : { "book":"Java8 Primer","member":"Test 1" }
*/
@PostMapping(path = "/api/transaction")
public ResponseEntity<Transaction> issueBookToMember(@RequestBody Map<String, String> params){
String book = params.get("book");
String member = params.get("member");
Transaction transaction = new Transaction();
transaction.setId(1L);
transaction.setBook(book);
transaction.setMember(member);
transaction.setDateOfIssue(LocalDateTime.now());
transaction.setDateOfReturn(Transaction.getDefaultReturnDate());
return ResponseEntity.ok().body(transaction);
}
/*
* PLEASE DO NOT CHANGE SIGNATURE OR METHOD TYPE OF END POINTS
*/
@PatchMapping(path = "/api/transaction/{transaction-id}/return")
public ResponseEntity<Transaction> returnBookTransaction(@PathVariable(name="transaction-id") Long transactionId){
String book = "Java8 Primer";
String member = "Test 1";
Transaction transaction = new Transaction();
transaction.setId(1L);
transaction.setBook(book);
transaction.setMember(member);
transaction.setDateOfIssue(LocalDateTime.now().minusDays(10));
transaction.setDateOfReturn(LocalDateTime.now());
return ResponseEntity.ok().body(transaction);
}
}
package com.mycompany.techtrial;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.web.servlet.MockMvc;
import org.springframework.test.web.servlet.ResultActions;
import org.springframework.test.web.servlet.request.MockMvcRequestBuilders;
import org.springframework.test.web.servlet.setup.MockMvcBuilders;
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
public class TransactionControllerTest {
MockMvc mockMvc;
@Mock
private TransactionController transactionController;
@Autowired
private TestRestTemplate template;
@Before
public void setup() throws Exception {
mockMvc = MockMvcBuilders.standaloneSetup(transactionController).build();
}
@Test
public void testBookIssue() throws Exception {
HttpEntity<Object> transaction = getHttpEntity(
"{\"book\": \"Java8 Primer\", \"member\": \"Test 1\" }");
ResponseEntity<Transaction> response = template.postForEntity(
"/api/transaction", transaction, Transaction.class);
Assert.assertEquals("Java8 Primer", response.getBody().getBook());
Assert.assertEquals("Test 1", response.getBody().getMember());
Assert.assertEquals(200,response.getStatusCode().value());
}
@Test
public void testBookReturnUsingRestTemplate() throws Exception {
Long transactionId = new Long(1);
HashMap<String,Long> uriVariables = new HashMap<String,Long>();
uriVariables.put("transaction-id", transactionId);
Transaction transaction = template.patchForObject(
"/api/transaction/{transaction-id}/return",null, Transaction.class, uriVariables);
Assert.assertEquals(new Long(1), transaction.getId());
//Assert.assertEquals(200,response.getStatusCode().value());
}
@Test
public void testBookReturnUsingMockMvc() throws Exception {
Long transactionId = new Long(1);
HashMap<String,Long> uriVariables = new HashMap<String,Long>();
uriVariables.put("transaction-id", transactionId);
ResultActions obj = mockMvc.perform( MockMvcRequestBuilders
.patch("/api/transaction/{transaction-id}/return",transactionId)
.content("")
.contentType(MediaType.APPLICATION_JSON)
.accept(MediaType.APPLICATION_JSON));
System.out.println(obj.getClass());
HttpStatus status = obj.andReturn().getModelAndView().getStatus();
boolean success = status.is2xxSuccessful();
System.out.println("success="+success);
Assert.assertEquals(new Long(1), transactionId);
//Assert.assertEquals(200,response.getStatusCode().value());
}
private HttpEntity<Object> getHttpEntity(Object body) {
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
return new HttpEntity<Object>(body, headers);
}
}
package com.mycompany.techtrial;
import java.io.Serializable;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
public class Transaction implements Serializable {
private static final long serialVersionUID = 8951221480021840448L;
private static final LocalDateTime defaultReturnDate = LocalDateTime.of(LocalDate.of(2299, 12, 31), LocalTime.of(12, 0, 0));
Long id;
private String book;
private String member;
public String getBook() {
return book;
}
public void setBook(String book) {
this.book = book;
}
public String getMember() {
return member;
}
public void setMember(String member) {
this.member = member;
}
//Date and time of issuance of this book
LocalDateTime dateOfIssue;
//Date and time of return of this book
LocalDateTime dateOfReturn;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public LocalDateTime getDateOfIssue() {
return dateOfIssue;
}
public void setDateOfIssue(LocalDateTime dateOfIssue) {
this.dateOfIssue = dateOfIssue;
}
public LocalDateTime getDateOfReturn() {
return dateOfReturn;
}
public void setDateOfReturn(LocalDateTime dateOfReturn) {
this.dateOfReturn = dateOfReturn;
}
@Override
public String toString() {
return "Transaction [id=" + id + ", book=" + book + ", member=" + member + ", dateOfIssue=" + dateOfIssue + ", dateOfReturn=" + dateOfReturn + "]";
}
//@PrePersist
void preInsert() {
if (this.dateOfReturn == null)
this.dateOfReturn = defaultReturnDate;
}
public static LocalDateTime getDefaultReturnDate() {
return defaultReturnDate;
}
}
package com.mycompany.techtrial;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class MyLibraryApplication {
public static void main(String[] args) {
SpringApplication.run(MyLibraryApplication.class, args);
}
}
It seems to be a known issue with RestTemplate's default HTTP client.
RestTemplate bug
A workaround for this is to use the Apache HttpComponents HttpClient library as the request factory (via RestTemplateBuilder.requestFactory) and pass that builder to the TestRestTemplate constructor.
After that you can use the exchange method on the TestRestTemplate class and do a PATCH request.
Sample code to create TestRestTemplate:
Supplier<ClientHttpRequestFactory> supplier = () -> {
    return new HttpComponentsClientHttpRequestFactory();
};
// RestTemplateBuilder is immutable, so keep the builder instance returned by requestFactory(...)
RestTemplateBuilder restTemplateBuilder = new RestTemplateBuilder().requestFactory(supplier);
TestRestTemplate testRestTemplate = new TestRestTemplate(restTemplateBuilder);
testRestTemplate.exchange("/api/transaction/{transaction-id}/return", HttpMethod.PATCH, null, Transaction.class, uriVariables);
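Putting it together, the whole test method might look roughly like this (a sketch, not a verified solution; it assumes the httpclient dependency is on the classpath and that the random port is injected into a field with @LocalServerPort):
@Test
public void testBookReturnUsingRestTemplate() throws Exception {
    // Build a TestRestTemplate backed by the Apache HttpComponents request factory,
    // which supports the PATCH verb (the JDK's default HttpURLConnection does not).
    Supplier<ClientHttpRequestFactory> requestFactory = HttpComponentsClientHttpRequestFactory::new;
    RestTemplateBuilder builder = new RestTemplateBuilder()
            .rootUri("http://localhost:" + port)   // port is the @LocalServerPort field
            .requestFactory(requestFactory);
    TestRestTemplate testRestTemplate = new TestRestTemplate(builder);

    Map<String, Long> uriVariables = new HashMap<>();
    uriVariables.put("transaction-id", 1L);

    ResponseEntity<Transaction> response = testRestTemplate.exchange(
            "/api/transaction/{transaction-id}/return",
            HttpMethod.PATCH, null, Transaction.class, uriVariables);

    Assert.assertEquals(200, response.getStatusCode().value());
    Assert.assertEquals(Long.valueOf(1), response.getBody().getId());
}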
I wanted to know if it is possible to use an ObservableMap to populate a TableView.
I use an ObservableMap instead of an ObservableList because I need to add and delete often, so I need to minimize the cost.
My map uses a BigInteger as the key and a type with many properties as the value.
In my TableView I just want to display the values, with a column per property. I hope that is clear.
Thanks
I've been trying to do this. I guess the post is old, but I don't see any answers anywhere on the net. The examples I've found use the map keys for columns and then a list of maps, one per row. I'd like to see the rows as keys with their associated values. It's a long example.
package tablemap;
import static java.lang.Math.random;
import java.util.Map;
import java.util.TreeMap;
import javafx.application.Application;
import javafx.beans.property.SimpleStringProperty;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.event.ActionEvent;
import javafx.event.EventHandler;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.TableColumn;
import javafx.scene.control.TableColumn.CellEditEvent;
import javafx.scene.control.TableView;
import javafx.scene.control.cell.TextFieldTableCell;
import javafx.scene.layout.VBox;
import javafx.stage.Stage;
public class TableMap extends Application {
@Override
public void start(Stage primaryStage) {
VBox root = new VBox();
Map<String,LineItem> mapData = new TreeMap<>();
for (int i = 0; i < 3; i++)
mapData.put(String.valueOf(random()), new LineItem(String.valueOf(i),"i"));
ObservableList<Map.Entry<String,LineItem>> listData =
FXCollections.observableArrayList(mapData.entrySet());
TableView<Map.Entry<String,LineItem>> tv = new TableView(listData);
TableColumn<Map.Entry<String,LineItem>,String> keyCol = new TableColumn("Key");
keyCol.setCellValueFactory(
(TableColumn.CellDataFeatures<Map.Entry<String,LineItem>, String> p) ->
new SimpleStringProperty(p.getValue().getKey()));
TableColumn<Map.Entry<String,LineItem>,String> lineNoCol = new TableColumn("Line No");
lineNoCol.setCellValueFactory(
(TableColumn.CellDataFeatures<Map.Entry<String,LineItem>, String> p) ->
new SimpleStringProperty(p.getValue().getValue().getLineNo()));
TableColumn<Map.Entry<String,LineItem>,String> descCol = new TableColumn("Desc");
descCol.setCellValueFactory(
(TableColumn.CellDataFeatures<Map.Entry<String,LineItem>, String> p) ->
new SimpleStringProperty(p.getValue().getValue().getDesc()));
descCol.setCellFactory(TextFieldTableCell.forTableColumn());
descCol.setOnEditCommit((CellEditEvent<Map.Entry<String,LineItem>, String> t) -> {
t.getTableView().getItems().get(t.getTablePosition().getRow())
.getValue().setDesc(t.getNewValue());
});
tv.getColumns().addAll(keyCol,lineNoCol, descCol);
tv.setEditable(true);
tv.setColumnResizePolicy(TableView.CONSTRAINED_RESIZE_POLICY);
Button btnOut = new Button("out");
btnOut.setOnAction(new EventHandler<ActionEvent>() {
@Override
public void handle(ActionEvent t) {
for (Map.Entry<String,LineItem> me : mapData.entrySet()){
System.out.println("key "+me.getKey()+" entry "+me.getValue().toCSVString());
}
for (Map.Entry<String,LineItem> me : listData){
System.out.println("key "+me.getKey()+" entry "+me.getValue().toCSVString());
}
}
});
root.getChildren().addAll(tv,btnOut);
Scene scene = new Scene(root, 300, 200);
primaryStage.setTitle("Map Table Test");
primaryStage.setScene(scene);
primaryStage.show();
}
}
And the LineItem Class Code
package tablemap;
import javafx.beans.property.SimpleStringProperty;
import javafx.beans.property.StringProperty;
/* LineItem class */
public class LineItem {
private final StringProperty lineNo = new SimpleStringProperty();
private final StringProperty desc = new SimpleStringProperty();
public LineItem(String ln, String dsc) {
lineNo.set(ln); desc.set(dsc);
}
public String getLineNo() {return (lineNo.getValue() != null) ?lineNo.get():"";}
public void setLineNo(String lineNo) {this.lineNo.set(lineNo);}
public StringProperty lineNoProperty() {return lineNo;}
public String getDesc() {return (desc.getValue() != null) ?desc.get():"";}
public void setDesc(String desc) {this.desc.set(desc);}
public StringProperty descProperty() {return desc;}
public String toCSVString(){
return lineNo.getValueSafe()+","+
desc.getValueSafe()+"\n";
}
}
You can see, after editing data and clicking "out", that changes in the list are reflected in the map. I still have to check the other direction and handle insertions and deletions, but that shouldn't be too hard.
I packaged up my Map Table listeners in a subclass of TableView.
package tablemap;
import java.util.AbstractMap;
import java.util.Map;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.MapChangeListener;
import javafx.collections.ObservableList;
import javafx.collections.ObservableMap;
import javafx.scene.control.TableView;
public class MapTableView<K,V> extends TableView<Map.Entry<K,V>>{
private final ObservableList<Map.Entry<K,V>> obsList;
private final ObservableMap<K,V> map;
private final MapChangeListener<K,V> mapChange;
private final ListChangeListener<Map.Entry<K,V>> listChange;
public MapTableView(ObservableMap<K,V> map) {
this.map = map;
obsList = FXCollections.observableArrayList(map.entrySet());
setItems(obsList);
mapChange = new MapChangeListener<K, V>() {
@Override
public void onChanged(MapChangeListener.Change<? extends K, ? extends V> change) {
obsList.removeListener(listChange);
if (change.wasAdded())
obsList.add(new AbstractMap.SimpleEntry(change.getKey(),change.getValueAdded()));
if (change.wasRemoved()){
//obsList.remove(new AbstractMap.SimpleEntry(change.getKey(),change.getValueRemoved()));
// ^ doesn't work always, use loop instead
for (Map.Entry<K,V> me : obsList){
if (me.getKey().equals(change.getKey())){
obsList.remove(me);
break;
}
}
}
obsList.addListener(listChange);
}
};
listChange = (ListChangeListener.Change<? extends Map.Entry<K, V>> change) -> {
map.removeListener(mapChange);
while (change.next()){
//maybe check for uniqueness here
if (change.wasAdded()) for (Map.Entry<K, V> me: change.getAddedSubList())
map.put(me.getKey(),me.getValue());
if (change.wasRemoved()) for (Map.Entry<K, V> me: change.getRemoved())
map.remove(me.getKey());
}
map.addListener(mapChange);
};
map.addListener(mapChange);
obsList.addListener(listChange);
}
//adding to list should be unique
public void addUnique(K key, V value){
boolean isFound = false;
//if a duplicate key just change the value
for (Map.Entry<K,V> me : getItems()){
if (me.getKey().equals(key)){
isFound = true;
me.setValue(value);
break;//only first match
}
}
if (!isFound) // add new entry
getItems().add(new AbstractMap.SimpleEntry<>(key,value));
}
//for doing lengthy map operations
public void removeMapListener(){
map.removeListener(mapChange);
}
//for resyncing list to map after many changes
public void resetMapListener(){
obsList.removeListener(listChange);
obsList.clear();
obsList.addAll(map.entrySet());
obsList.addListener(listChange);
map.addListener(mapChange);
}
}
It seems to work so far. I create it with the following code:
final ObservableMap<String, LineItem> obsMap = FXCollections.observableHashMap();
final MapTableView<String,LineItem> mtv = new MapTableView(obsMap);
You can even edit the keys.
final TableColumn<Map.Entry<String,LineItem>,String> keyCol = new TableColumn("Key");
keyCol.setCellValueFactory(
(TableColumn.CellDataFeatures<Map.Entry<String,LineItem>, String> p) ->
new SimpleStringProperty(p.getValue().getKey()));
keyCol.setCellFactory(TextFieldTableCell.forTableColumn());
keyCol.setOnEditCommit((CellEditEvent<Map.Entry<String,LineItem>, String> t) -> {
final String oldKey = t.getOldValue();
final LineItem oldLineItem = obsMap.get(oldKey);
obsMap.remove(oldKey);//should remove from list but maybe doesn't always
obsMap.put(t.getNewValue(),oldLineItem);
});
You can see I added methods to remove and re-add the map listeners. Adding and removing 100k entries takes 0.65 seconds without the listeners and 5.2 seconds with them.
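For example, a bulk load can be wrapped like this (a small usage sketch built on the MapTableView methods above; the loop body is illustrative):
// detach the map listener, do the heavy work, then resync the list once at the end
mtv.removeMapListener();
for (int i = 0; i < 100000; i++) {
    obsMap.put(String.valueOf(i), new LineItem(String.valueOf(i), "bulk " + i));
}
mtv.resetMapListener(); // clears the backing list and reloads it from the map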
Here's the whole thing in one file on pastebin. http://pastebin.com/NmdTURFt