Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions Engine/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
<app.root>../</app.root>
</properties>
<dependencies>
<dependency>
<!-- <dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka-clients.version}</version>
Expand All @@ -24,7 +24,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>${kafka-avro-serializer.version}</version>
</dependency>
</dependency> -->
<dependency>
<groupId>com.ing</groupId>
<artifactId>ingenious-testdata-csv</artifactId>
Expand Down Expand Up @@ -332,10 +332,10 @@
</dependency>
</dependencies>
<repositories>
<repository>
<!-- <repository>
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
</repository>
</repository> -->
</repositories>
<build>
<resources>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,13 +35,15 @@
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;

/** Kafka Imports
import org.apache.kafka.common.header.Header;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
*/

public class Command {

Expand Down Expand Up @@ -142,6 +144,7 @@ public class Command {
* *** Kafka Parameters ****
*/

/** Kafka Parameters
static public Map<String, List<Header>> kafkaHeaders = new HashMap<>();
static public Map<String, String> kafkaProducerTopic = new HashMap<>();
static public Map<String, String> kafkaConsumerTopic = new HashMap<>();
Expand Down Expand Up @@ -179,6 +182,7 @@ public class Command {
static public Map<String, ConsumerRecord> kafkaConsumerRecord = new HashMap<>();
static public Map<String, KafkaProducer> kafkaProducer = new HashMap<>();
static public Map<String, KafkaConsumer> kafkaConsumer = new HashMap<>();
*/

public Command(CommandControl cc) {
Commander = cc;
Expand Down
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
/** Kafka Operations related commands

package com.ing.engine.commands.kafka;

import com.fasterxml.jackson.core.JsonParser;
Expand Down Expand Up @@ -1121,3 +1123,5 @@ private static boolean isCompatible(JsonNode value, Schema schema) {
}
}
}

*/
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,13 @@ public void putRestRequest() {
}
}

@Action(object = ObjectType.WEBSERVICE, desc = "POST Rest Request ", input = InputType.YES, condition = InputType.OPTIONAL)
/**
 * Previously, {@code postRestRequest} mandated a payload when executing POST requests.
 * Since POST requests do not always carry a payload, the payload is now optional
 * ({@code input = InputType.OPTIONAL}). This improves flexibility and aligns the
 * framework more closely with standard REST API practice.
 */

@Action(object = ObjectType.WEBSERVICE, desc = "POST Rest Request ", input = InputType.OPTIONAL, condition = InputType.OPTIONAL)
public void postRestRequest() {
try {
createhttpRequest(RequestMethod.POST);
Expand Down Expand Up @@ -371,6 +377,37 @@ public void storeResponseBodyInDataSheet() {
}
}

/**
 * Stores the headers of the most recent HTTP response in the datasheet.
 * <p>
 * Input must be in the format {@code sheetName:ColumnName}; the headers of
 * {@code response.get(key)} are written as a single string into that cell.
 * Reports DONE on success and DEBUG (with the cause) on any failure or on a
 * malformed input string.
 */
@Action(object = ObjectType.WEBSERVICE, desc = "Store Response Headers In DataSheet ", input = InputType.YES)
public void storeResponseHeadersInDataSheet() {
    try {
        String strObj = Input;
        // Expected input format: sheetName:ColumnName
        if (strObj.matches(".*:.*")) {
            try {
                System.out.println("Updating value in SubIteration " + userData.getSubIteration());
                // Split once, limit 2, so column names containing ':' survive intact.
                String[] parts = strObj.split(":", 2);
                String sheetName = parts[0];
                String columnName = parts[1];
                userData.putData(sheetName, columnName, response.get(key).headers().toString());
                Report.updateTestLog(Action, "Response headers stored in " + strObj, Status.DONE);
            } catch (Exception ex) {
                Logger.getLogger(this.getClass().getName()).log(Level.OFF, ex.getMessage(), ex);
                // Fixed copy-paste message ("text" -> "headers") so failures are attributable.
                Report.updateTestLog(Action, "Error Storing headers in datasheet :" + ex.getMessage(), Status.DEBUG);
            }
        } else {
            Report.updateTestLog(Action,
                    "Given input [" + Input + "] format is invalid. It should be [sheetName:ColumnName]",
                    Status.DEBUG);
        }
    } catch (Exception ex) {
        Logger.getLogger(this.getClass().getName()).log(Level.OFF, null, ex);
        // Fixed copy-paste message: this method stores response HEADERS, not the body.
        Report.updateTestLog(Action, "Error Storing response headers in datasheet :" + "\n" + ex.getMessage(),
                Status.DEBUG);
    }
}

@Action(object = ObjectType.WEBSERVICE, desc = "Assert XML Element Equals ", input = InputType.YES, condition = InputType.YES)
public void assertXMLelementEquals() {

Expand Down Expand Up @@ -980,6 +1017,13 @@ private void createhttpRequest(RequestMethod requestmethod) throws InterruptedEx
}
}

/**
 * The HTML report previously displayed only the response payload once a request
 * was marked Complete. That made debugging difficult when critical information —
 * such as trace IDs — was present exclusively in the response headers.
 * The report now includes the response headers alongside the response payload,
 * which provides greater visibility and significantly aids debugging.
 */

private void savePayload(String reqOrRes, String data) {
String payloadFileName = "";
String path = "";
Expand All @@ -999,6 +1043,11 @@ private void savePayload(String reqOrRes, String data) {
if (location.createNewFile()) {
FileWriter writer = new FileWriter(location);
writer.write(data);
// Appending headers when saving response
if (reqOrRes.equals("response")) {
writer.write("\n\n--- Response Headers ---\n");
writer.write(response.get(key).headers().toString());
}
writer.close();
}
}
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# INGenious Playwright Studio - Test Automation for Everyone

[![Build INGenious Source Code](https://github.com/ing-bank/INGenious/actions/workflows/maven.yml/badge.svg)](https://github.com/ing-bank/INGenious/actions/workflows/maven.yml)
![Static Badge](https://img.shields.io/badge/Version-2.3-%23FF6200)
![Static Badge](https://img.shields.io/badge/Version-2.3.1-%23FF6200)

--------------------------------------------------------------------

Expand Down
8 changes: 4 additions & 4 deletions Resources/Engine/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -155,10 +155,10 @@
</plugins>
</build>
<repositories>
<repository>
<!-- <repository>
<id>confluent</id>
<url>https://packages.confluent.io/maven/</url>
</repository>
</repository> -->
</repositories>
<pluginRepositories/>
<dependencies>
Expand Down Expand Up @@ -189,7 +189,7 @@



<dependency>
<!-- <dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>${kafka-clients.version}</version>
Expand All @@ -198,7 +198,7 @@
<groupId>io.confluent</groupId>
<artifactId>kafka-avro-serializer</artifactId>
<version>${kafka-avro-serializer.version}</version>
</dependency>
</dependency> -->
<dependency>
<groupId>com.ing</groupId>
<artifactId>ingenious-testdata-csv</artifactId>
Expand Down