How to merge files with the same name under different folders using a Maven plugin

I have a requirement to merge all files with the same name under two different folders and write the result to another folder.
Example:
common
 |-- V1.sql
 |-- V2.sql
module
 |-- V1.sql
 |-- V2.sql
Now my target directory should be:
target dir
 |-- V1.sql (has the contents of both common and module)
 |-- V2.sql (has the contents of both common and module)
I have looked at several Maven plugins, but they don't seem to support this. Please advise if anyone has come across such a problem or has implemented a plugin for it.

Not really pretty, but I achieved this with the Maven Exec Plugin:
<plugin>
<groupId>org.codehaus.mojo</groupId>
<artifactId>exec-maven-plugin</artifactId>
<version>1.4.0</version>
<executions>
<execution>
<id>resourceMerge</id>
<goals>
<goal>java</goal>
</goals>
<phase>prepare-package</phase>
<configuration>
<mainClass>mavenProcessor.Resourcesmerger</mainClass>
<arguments>
<argument>${project.build.directory}/resourcesDefault</argument>
<argument>${project.build.directory}/resourcesProfile</argument>
<argument>${project.build.directory}/resourcesMerged</argument>
</arguments>
</configuration>
</execution>
</executions>
</plugin>
The processor looks like this:
package mavenProcessor;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.stream.Collectors;
public class Resourcesmerger {
public static void main(String[] args) throws Throwable {
Path inputDir1 = new File(args[0]).toPath();
Path inputDir2 = new File(args[1]).toPath();
Path outputDir = new File(args[2]).toPath();
copyAppending(inputDir1, outputDir);
if (Files.exists(inputDir2)) {
copyAppending(inputDir2, outputDir);
}
}
private static void copyAppending(Path inputDir1, Path outputDir) throws IOException, FileNotFoundException {
List<Path> defaultResources = Files.walk(inputDir1).collect(Collectors.toList());
for (Path path : defaultResources) {
if (Files.isRegularFile(path)) {
Path relativePath = inputDir1.relativize(path);
Path targetPath = outputDir.resolve(relativePath);
targetPath.getParent().toFile().mkdirs();
try (FileOutputStream fos = new FileOutputStream(targetPath.toFile(), true)) {
System.out.println("Merge " + path + " to " + targetPath);
System.out.flush();
Files.copy(path, fos);
}
}
}
}
}
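For a quick manual check outside the Maven lifecycle, the merger can also be run directly from the command line; this is just a usage sketch and assumes the class has already been compiled to target/classes and that both input directories exist:
java -cp target/classes mavenProcessor.Resourcesmerger target/resourcesDefault target/resourcesProfile target/resourcesMerged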

Related

Accessing files in a Jar using ClassPathResource

I have a Spring application that I must convert to a jar. In this application I have a unit test:
@BeforeEach
void setUp() throws IOException {
//facturxHelper = new FacturxHelper();
facturxService = new FacturxService();
// String pdf = "facture.pdf"; // invalid pdfa1
String pdf = "resources/VALID PDFA1.pdf";
// InputStream sourceStream = new FileInputStream(pdf); //
InputStream sourceStream = getClass().getClassLoader().getResourceAsStream(pdf);
byte[] sourceBytes = IOUtils.toByteArray(sourceStream);
this.b64Pdf = Base64.getEncoder().encodeToString(sourceBytes);
}
@Test
void createFacturxMin() throws Exception {
// build a FacturX with the request object
FacturxRequestMin request = FacturxRequestMin.builder()
.pdf(this.b64Pdf)
.chorusPro(Boolean.FALSE)
.invoiceNumber("FA-2017-0010")
.issueDate("13/11/2017")
.buyerReference("SERVEXEC")
.seller(TradeParty.builder()
.name("Au bon moulin")
.specifiedLegalOrganization(LegalOrganization.builder()
.id("99999999800010") .scheme(SpecifiedLegalOrganizationScheme.FR_SIRENE.getSpecifiedLegalOrganizationScheme())
.build())
.postalAddress(PostalAddress.builder()
.countryId(CountryIso.FR.name())
.build())
.vatId("FR11999999998")
.build())
.buyer(TradeParty.builder()
.name("Ma jolie boutique")
.specifiedLegalOrganization(LegalOrganization.builder()
.id("78787878400035")
.scheme(SpecifiedLegalOrganizationScheme.FR_SIRENE.getSpecifiedLegalOrganizationScheme())
.build())
.build())
.headerMonetarySummation(HeaderMonetarySummation.builder()
.taxBasisTotalAmount("624.90")
.taxTotalAmount("46.25")
.prepaidAmount("201.00")
.grandTotalAmount("671.15")
.duePayableAmount("470.15")
.build())
.build();
FacturXAppManager facturXAppManager = new FacturXAppManager(facturxService);
FacturxResponse facturxResponse = facturXAppManager.createFacturxMin(request);
Gson gson = new GsonBuilder().setPrettyPrinting().create();
String json = gson.toJson(facturxResponse);
System.out.println(json);
}
The aim of the application is to create an XML file and embed it into the PDF.
My issue concerns XML validation against an XSD.
Here is an excerpt of the code:
public static boolean xmlValidator(String fxGuideLine, String xmlString) throws Exception {
System.out.println("xmlValidator() called");
File xsdFile = null;
Source source = new StreamSource(new StringReader(xmlString));
// I removed a lot of if/else statements concerning the files used to validate the XML
try {
xsdFile = new ClassPathResource(FacturxConstants.FACTUR_X_MINIMUM_XSD).getFile();
} catch (IOException e) {
throw new FacturxException(e.getMessage());
}
// validate the XML content
try {
SchemaFactory schemaFactory = SchemaFactory
.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
Schema schema = schemaFactory.newSchema(xsdFile);
Validator validator = schema.newValidator();
validator.validate(source);
return true;
} catch (SAXException | IOException e) {
throw new FacturxException(e.getLocalizedMessage());
}
...
}
In the constants class, I added the path to the XSD file:
public static final String FACTUR_X_MINIMUM_XSD = "resources/xsd/MINIMUM_XSD/FACTUR-X_MINIMUM.xsd";
In my POM file I want to include the resource files in the built jar.
<build>
<finalName>${project.artifactId}</finalName>
<resources>
<resource>
<directory>src/main/resources</directory>
<includes>
<include>*</include>
</includes>
</resource>
</resources>
<plugins>
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<version>3.3.0</version>
<configuration>
<outputDirectory> ${project.build.outputDirectory}\resources</outputDirectory>
</configuration>
</plugin>
<plugin>
<artifactId>maven-assembly-plugin</artifactId>
<version>3.4.2</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>single</goal>
</goals>
</execution>
</executions>
<configuration>
<descriptorRefs>
<descriptorRef>jar-with-dependencies</descriptorRef>
</descriptorRefs>
</configuration>
</plugin>
</plugins>
</build>
When I do a simple mvn clean package, everything runs perfectly.
So far so good.
The next step is where my problem comes in. Let's say I want to use this dependency in another application (a Spring Boot application). The previously compiled jar is a high-level API that I want to integrate.
I ran the following command line:
mvn install:install-file -Dfile=myapi.jar -DgroupId=fr.myapi -DartifactId=graph-api-sharepoint -Dversion=1.0.0-SNAPSHOT -Dpackaging=jar
I added my dependency correctly in my new project; that's perfect.
To check that the import worked correctly, I created a simple unit test with the same code (I do have a VALID PDFA1.pdf in my resources folder). So far so good.
When running the test I get the following error:
class path resource [resources/xsd/BASIC-WL_XSD/FACTUR-X_BASIC-WL.xsd] cannot be resolved to absolute file path because it does not reside in the file system: jar:file:/.m2/repository/fr/myapi/1.1.0/myapi-1.1.0.jar!/resources/xsd/BASIC-WL_XSD/FACTUR-X_BASIC-WL.xsd
How can I fix this issue? I read many posts, but none of the fixes solved my problem. I also think I will have an issue when compiling the Spring Boot app as a jar.
As mentioned, using a File won't work.
In the current code I updated it to use an InputStream:
InputStream is = new ClassPathResource(FacturxConstants.FACTUR_X_MINIMUM_XSD).getInputStream();
xsdSource = new StreamSource(is);
If my XSD path doesn't start with resources:
public static final String FACTUR_X_MINIMUM_XSD = "xsd/MINIMUM_XSD/FACTUR-X_MINIMUM.xsd";
I have the following exception:
class path resource [xsd/MINIMUM_XSD/FACTUR-X_MINIMUM.xsd] cannot be opened because it does not exist
If I put
public static final String FACTUR_X_MINIMUM_XSD = "resources/xsd/MINIMUM_XSD/FACTUR-X_MINIMUM.xsd";
the response is the following:
src-resolve: Cannot resolve the name 'ram:ExchangedDocumentContextType' to a(n) 'type definition' component.
I also updated the SchemaFactory and Schema implementation:
SchemaFactory schemaFactory =
SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
Schema schema = schemaFactory.newSchema(xsdSource);
Validator validator = schema.newValidator();
validator.validate(source);
return true;
public static final String FACTUR_X_MINIMUM_XSD = "resources/xsd/MINIMUM_XSD/FACTUR-X_MINIMUM.xsd";
is wrong; it should be the following (assuming src/main/resources/xsd is the actual location you are using):
public static final String FACTUR_X_MINIMUM_XSD = "/xsd/MINIMUM_XSD/FACTUR-X_MINIMUM.xsd";
Then your code is using a java.io.File, which won't work: a java.io.File needs to be a physical file on the file system, and this one isn't, since it is inside a jar file. You need to use an InputStream.
public static boolean xmlValidator(String fxGuideLine, String xmlString) throws Exception {
System.out.println("xmlValidator() called");
Source source = new StreamSource(new StringReader(xmlString));
// I removed a lot of if/else statements concerning the files used to validate the XML
try {
InputStream xsd = new ClassPathResource(FacturxConstants.FACTUR_X_MINIMUM_XSD).getInputStream();
StreamSource xsdSource = new StreamSource(xsd);
SchemaFactory schemaFactory = SchemaFactory
.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
Schema schema = schemaFactory.newSchema(xsdSource);
Validator validator = schema.newValidator();
validator.validate(source);
return true;
} catch (SAXException | IOException e) {
throw new FacturxException(e.getLocalizedMessage());
}
...
}
This loads the schema using an InputStream.
Thanks to M. Deinum, I was able to find a solution. I did indeed have to use StreamSource. However, this didn't solve the following issue:
src-resolve: Cannot resolve the name 'ram:ExchangedDocumentContextType' to a(n) 'type definition' component.
As I used several XSD files, I implemented a way to retrieve a list of sources using PathMatchingResourcePatternResolver (from Spring):
private static Source[] buildSources(String fxGuideLine, String pattern) throws SAXException, IOException {
List<Source> sources = new ArrayList<>();
PathMatchingResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver();
Resource[] resources = patternResolver.getResources(pattern);
for (Resource resource : resources) {
StreamSource dtd = new StreamSource(resource.getInputStream());
dtd.setSystemId(resource.getURI().toString());
sources.add(dtd);
}
return sources.toArray(new Source[sources.size()]);
}
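For completeness, here is a minimal sketch of how such a Source array might be plugged into the validation code above; the classpath pattern and variable names are assumptions for illustration, not the exact original code:
// Gather all XSDs of the profile so that cross-schema imports can be resolved (pattern is an assumption)
Source[] xsdSources = buildSources(fxGuideLine, "classpath*:xsd/MINIMUM_XSD/*.xsd");
SchemaFactory schemaFactory = SchemaFactory.newInstance(XMLConstants.W3C_XML_SCHEMA_NS_URI);
// newSchema(Source[]) lets the schemas reference each other, avoiding the
// "src-resolve: Cannot resolve the name ..." error seen with a single source
Schema schema = schemaFactory.newSchema(xsdSources);
Validator validator = schema.newValidator();
validator.validate(new StreamSource(new StringReader(xmlString)));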

Java Spring Boot: How to access local WSDL instead of a Public WSDL URL?

I have been using a public WSDL URL to make a call to our customer. Now the customer decided to hide the public WSDL URL and I have been asked to use a local WSDL that I need to deploy on my own server.
I'm using Java Spring Boot and here's my previous code to call the public WSDL URL:
try {
SaajSoapMessageFactory messageFactory= new SaajSoapMessageFactory(MessageFactory.newInstance());
messageFactory.afterPropertiesSet();
WebServiceTemplate webServiceTemplate = new WebServiceTemplate( messageFactory);
Jaxb2Marshaller marshaller = new Jaxb2Marshaller();
marshaller.setContextPath(appConfig.SOAP_PKG);
marshaller.afterPropertiesSet();
webServiceTemplate.setMarshaller(marshaller);
webServiceTemplate.setUnmarshaller(marshaller);
webServiceTemplate.afterPropertiesSet();
WebServiceMessageSender messageSender = this.webServiceMessageSender();
webServiceTemplate.setMessageSender(messageSender);
try {
response = webServiceTemplate.marshalSendAndReceive(soapURL, request, new WebServiceMessageCallback() {
@Override
public void doWithMessage(WebServiceMessage message) {
try {
SoapHeader soapHeader = ((SoapMessage) message).getSoapHeader();
Map mapRequest = new HashMap();
mapRequest.put("loginuser", soapUsername);
mapRequest.put("loginpass", soapPassword);
StrSubstitutor substitutor = new StrSubstitutor(mapRequest, "%(", ")");
String finalXMLRequest = substitutor.replace(appConfig.SOAP_HEADER);
StringSource headerSource = new StringSource(finalXMLRequest);
Transformer transformer = TransformerFactory.newInstance().newTransformer();
transformer.transform(headerSource, soapHeader.getResult());
} catch (Exception e) {
logger.error("Error while invoking session service :", e.getMessage() );
}
}
});
}catch (SoapFaultClientException e){
logger.error("Error while invoking session service : " + e.getMessage());
}
....
How am I now supposed to replace "soapURL", the public WSDL URL used in marshalSendAndReceive, with the local WSDL?
I used wsdl2java to generate the sources in Eclipse as shown below.
<plugin>
<groupId>org.apache.cxf</groupId>
<artifactId>cxf-codegen-plugin</artifactId>
<version>3.1.12</version>
<executions>
<execution>
<id>generate-sources</id>
<phase>generate-sources</phase>
<configuration>
<sourceRoot>${project.build.directory}/generated/cxf</sourceRoot>
<wsdlOptions>
<wsdlOption>
<wsdl>${basedir}/src/main/resources/wsdl/XXX.wsdl</wsdl>
<wsdlLocation>classpath:wsdl/XXX.wsdl</wsdlLocation>
</wsdlOption>
</wsdlOptions>
<fork>always</fork>
</configuration>
<goals>
<goal>wsdl2java</goal>
</goals>
</execution>
</executions>
</plugin>
The Java classes have been created. What is the next step after generating all the classes? Should I create a CXF client configuration?
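For orientation only: once wsdl2java has run, the generated service class is typically instantiated against the bundled WSDL and then pointed at the live endpoint. The sketch below uses hypothetical generated names (XXXService, XXXPortType) and a made-up endpoint URL, since the real ones depend on XXX.wsdl:
// Hypothetical names: wsdl2java derives the actual class names from XXX.wsdl
java.net.URL wsdlUrl = XXXService.class.getResource("/wsdl/XXX.wsdl"); // matches the classpath wsdlLocation above
XXXService service = new XXXService(wsdlUrl);
XXXPortType port = service.getXXXPort();
// Point the client at the real service endpoint instead of the (now hidden) public WSDL URL
((javax.xml.ws.BindingProvider) port).getRequestContext()
    .put(javax.xml.ws.BindingProvider.ENDPOINT_ADDRESS_PROPERTY, "https://customer.example.com/ws/endpoint");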

.jar created from maven shade plugin throws error when accessing resources under src/main/resources, but running main from exploded .jar works?

Updated Exec Summary of Solution
Following up on the answer provided by Victor, I implemented a Java class that lists the contents of a folder resource on the classpath. Most critical for me was that this had to work whether the classpath resource is discovered when executing from the IDE, from an exploded uberjar, or from within an unexploded uberjar (which I typically create with the maven-shade-plugin). The class and its associated unit test are available here.
Original Question
I am seeing strange behavior with the maven-shade-plugin and classpath resources when I run a very simple
Java test program that accesses a directory structure in a standard Maven project like this:
src/main
 |-- Test.java
 |-- resources/
      |-- resource-directory/
           |-- spark/
           |    |-- junk1
           |-- zeppelin/
                |-- junk2
When run from the IDE or from the exploded Maven-shaded .jar (please see below),
it works correctly, which means it prints this:
result of directory contents as classpath resource:[spark, zeppelin]
The source is as follows:
import org.apache.commons.io.IOUtils;
import java.io.IOException;
import java.io.InputStream;
public class Tester {
public void test(String resourceName) throws IOException {
InputStream in = this.getClass().getClassLoader().getResourceAsStream(resourceName);
System.out.println("input stream: " + in);
Object result = IOUtils.readLines(in);
System.out.println("result of directory contents as classpath resource:" + result);
}
public static void main(String[] args) throws IOException {
new Tester().test("resource-directory");
}
}
Now, if I run mvn clean install in my project and run the
Maven-shaded .jar under ${project.dir}/target, I see the following exception:
> java -jar target/sample.jar
Exception in thread "main" java.lang.NullPointerException
at java.io.FilterInputStream.read(FilterInputStream.java:133)
at sun.nio.cs.StreamDecoder.readBytes(StreamDecoder.java:284)
at sun.nio.cs.StreamDecoder.implRead(StreamDecoder.java:326)
at sun.nio.cs.StreamDecoder.read(StreamDecoder.java:178)
at java.io.InputStreamReader.read(InputStreamReader.java:184)
at java.io.BufferedReader.fill(BufferedReader.java:161)
at java.io.BufferedReader.readLine(BufferedReader.java:324)
at java.io.BufferedReader.readLine(BufferedReader.java:389)
at org.apache.commons.io.IOUtils.readLines(IOUtils.java:1030)
at org.apache.commons.io.IOUtils.readLines(IOUtils.java:987)
at org.apache.commons.io.IOUtils.readLines(IOUtils.java:968)
at Tester.test(Tester.java:16)
at Tester.main(Tester.java:24)
Running with Exploded .jar
> mkdir explode/
> cd explode/
> jar xvf ../sample.jar
......
inflated: META-INF/MANIFEST.MF
created: META-INF/
etc etc.
> ls # look at contents of exploded .jar:
logback.xml META-INF org resource-directory Tester.class
#
# now run class with CLASSPATH="."
(master) /tmp/maven-shade-non-working-example/target/explode > java Tester
input stream: java.io.ByteArrayInputStream@70dea4e
result of directory contents as classpath resource:[spark, zeppelin] # <<<- works !
I have the whole project here: https://github.com/buildlackey/maven-shade-non-working-example
but for convenience, here is the pom.xml (below) with the two Maven Shade configurations that I tried.
Note: I don't think the IncludeResourceTransformer would be of any use, because my resources are appearing
at the appropriate levels in the .jar file.
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/maven-v4_0_0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.foo.core</groupId>
<artifactId>sample</artifactId>
<packaging>jar</packaging>
<version>1.0-SNAPSHOT</version>
<name>sample</name>
<url>http://maven.apache.org</url>
<properties>
<jdk.version>1.8</jdk.version>
<junit.version>4.11</junit.version>
</properties>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
<scope>test</scope>
</dependency>
<dependency><!-- commons-io: Easy conversion from stream to string list, etc.-->
<groupId>commons-io</groupId>
<artifactId>commons-io</artifactId>
<version>2.4</version>
</dependency>
</dependencies>
<build>
<finalName>sample</finalName>
<plugins>
<!-- Set a compiler level -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>2.3.2</version>
<configuration>
<source>${jdk.version}</source>
<target>${jdk.version}</target>
</configuration>
</plugin>
<!-- Maven Shade Plugin -->
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.3</version>
<executions>
<!-- Run shade goal on package phase -->
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<!-- add Main-Class to manifest file -->
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>Tester</mainClass>
</transformer>
<!-- tried with the stanza below enabled, and also disabled: in both cases, got exceptions from runs -->
<transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
<resource>src/main/resources/</resource>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
Anyway, thanks in advance for any help you can provide ~
chris
UPDATE
This didn't work for me in Spring when I tried it (but I'd be interested if anyone has success with a Spring approach). I have a working alternative which I will post shortly. But if you care to comment on how to fix this broken Spring attempt, I'd be very interested.
import org.springframework.core.io.Resource;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.core.io.support.ResourcePatternResolver;
import java.io.IOException;
public class Tester {
public void test(String resourceName) throws IOException {
ResourcePatternResolver resourceResolver = new PathMatchingResourcePatternResolver();
Resource[] resources = resourceResolver.getResources("resource-directory/*");
for (Resource resource : resources) {
System.out.println("resource: " + resource.getDescription());
}
}
public static void main(String[] args) throws IOException {
new Tester().test("resource-directory/*");
}
}
The problem is that getResourceAsStream can only read files as a stream from a jar file, not folders.
To read folder contents from a jar file, you might need to use an approach like the one described in the accepted answer to this question:
How can I get a resource "Folder" from inside my jar File?
To supplement the answer from my good friend Victor, here is a full code solution below. The full project is available here.
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* List entries of a subfolder of an entry in the class path, which may consist of file system folders and .jars.
*/
public class ClassPathResourceFolderLister {
private static final Logger LOGGER = LoggerFactory.getLogger(ClassPathResourceFolderLister.class);
/**
* For each entry in the classpath, verify that (a) "folder" exists, and (b) "folder" has child content, and if
* these conditions hold, return the child entries (be they files, or folders). If neither (a) nor (b) are true for
* a particular class path entry, move on to the next entry and try again.
*
@param folder the folder to match within the class path entry
*
@return the subfolder items of the first matching class path entry, with a no duplicates guarantee
*/
public static Collection<String> getFolderListing(final String folder) {
final String classPath = System.getProperty("java.class.path", ".");
final String[] classPathElements = classPath.split(System.getProperty("path.separator"));
List<String> classPathElementsList = new ArrayList<String> ( Arrays.asList(classPathElements));
return getFolderListingForFirstMatchInClassPath(folder, classPathElementsList);
}
private static Collection<String>
getFolderListingForFirstMatchInClassPath(final String folder, List<String> classPathElementsList) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("getFolderListing for " + folder + " with classpath elements " + classPathElementsList);
}
Collection<String> retval = new HashSet<String>();
String cleanedFolder = stripTrailingAndLeadingSlashes(folder);
for (final String element : classPathElementsList) {
System.out.println("class path element:" + element);
retval = getFolderListing(element, cleanedFolder);
if (retval.size() > 0) {
if (LOGGER.isDebugEnabled()) {
LOGGER.debug("found matching folder in class path list. returning: " + retval);
}
return retval;
}
}
return retval;
}
private static String stripTrailingAndLeadingSlashes(final String folder) {
String stripped = folder;
if (stripped.equals("/")) { // handle degenerate case:
return "";
} else { // handle cases for strings starting or ending with "/", confident that we have at least two characters
if (stripped.endsWith("/")) {
stripped = stripped.substring(0, stripped.length()-1);
}
if (stripped.startsWith("/")) {
stripped = stripped.substring(1, stripped.length());
}
if (stripped.startsWith("/") || stripped.endsWith("/")) {
throw new IllegalArgumentException("too many consecutive slashes in folder specification: " + stripped);
}
}
return stripped;
}
private static Collection<String> getFolderListing( final String element, final String folderName) {
final File file = new File(element);
if (file.isDirectory()) {
return getFolderContentsListingFromSubfolder(file, folderName);
} else {
return getResourcesFromJarFile(file, folderName);
}
}
private static Collection<String> getResourcesFromJarFile(final File file, final String folderName) {
final String leadingPathOfZipEntry = folderName + "/";
final HashSet<String> retval = new HashSet<String>();
ZipFile zf = null;
try {
zf = new ZipFile(file);
final Enumeration e = zf.entries();
while (e.hasMoreElements()) {
final ZipEntry ze = (ZipEntry) e.nextElement();
final String fileName = ze.getName();
if (LOGGER.isTraceEnabled()) {
LOGGER.trace("zip entry fileName:" + fileName);
}
if (fileName.startsWith(leadingPathOfZipEntry)) {
final String justLeafPartOfEntry = fileName.replaceFirst(leadingPathOfZipEntry,"");
final String initSegmentOfPath = justLeafPartOfEntry.replaceFirst("/.*", "");
if (initSegmentOfPath.length() > 0) {
LOGGER.trace(initSegmentOfPath);
retval.add(initSegmentOfPath);
}
}
}
} catch (Exception e) {
throw new RuntimeException("getResourcesFromJarFile failed. file=" + file + " folder=" + folderName, e);
} finally {
if (zf != null) {
try {
zf.close();
} catch (IOException e) {
LOGGER.error("getResourcesFromJarFile close failed. file=" + file + " folder=" + folderName, e);
}
}
}
return retval;
}
private static Collection<String> getFolderContentsListingFromSubfolder(final File directory, String folderName) {
final HashSet<String> retval = new HashSet<String>();
try {
final String fullPath = directory.getCanonicalPath() + "/" + folderName;
final File subFolder = new File(fullPath);
System.out.println("fullPath:" + fullPath);
if (subFolder.isDirectory()) {
final File[] fileList = subFolder.listFiles();
for (final File file : fileList) {
retval.add(file.getName());
}
}
} catch (final IOException e) {
throw new Error(e);
}
return retval;
}
}
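With this helper on the classpath, the original Tester can simply delegate to it instead of trying to read the folder as a stream. Here is a minimal usage sketch; the expected output assumes the same resource-directory layout as above:
import java.util.Collection;
public class Tester {
    public static void main(String[] args) {
        // Works from the IDE, from an exploded jar, and from inside an unexploded uber-jar
        Collection<String> entries = ClassPathResourceFolderLister.getFolderListing("resource-directory");
        System.out.println("result of directory contents as classpath resource:" + entries);
        // expected (order not guaranteed, a HashSet is used internally): [spark, zeppelin]
    }
}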

CXF InInterceptor not firing

I have created a web service. It works fine. Now I'm trying to add authentication to it. I'm using CXF interceptors for that purpose. For some reason the interceptors won't fire. What am I missing? This is my first web service.
import javax.annotation.Resource;
import javax.inject.Inject;
import javax.jws.WebMethod;
import javax.jws.WebParam;
import javax.jws.WebService;
import javax.xml.ws.WebServiceContext;
import org.apache.cxf.interceptor.InInterceptors;
@WebService
@InInterceptors(interceptors = "ws.BasicAuthAuthorizationInterceptor")
public class Service {
@WebMethod
public void test(@WebParam(name = "value") Integer value) throws Exception {
System.out.println("Value = " + value);
}
}
-
package ws;
import java.io.IOException;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.apache.cxf.binding.soap.interceptor.SoapHeaderInterceptor;
import org.apache.cxf.configuration.security.AuthorizationPolicy;
import org.apache.cxf.endpoint.Endpoint;
import org.apache.cxf.interceptor.Fault;
import org.apache.cxf.message.Exchange;
import org.apache.cxf.message.Message;
import org.apache.cxf.transport.Conduit;
import org.apache.cxf.ws.addressing.EndpointReferenceType;
public class BasicAuthAuthorizationInterceptor extends SoapHeaderInterceptor {
@Override
public void handleMessage(Message message) throws Fault {
System.out.println("**** GET THIS LINE TO CONSOLE TO SEE IF INTERCEPTOR IS FIRING!!!");
AuthorizationPolicy policy = message.get(AuthorizationPolicy.class);
// If the policy is not set, the user did not specify credentials.
// 401 is sent to the client to indicate that authentication is required.
if (policy == null) {
sendErrorResponse(message, HttpURLConnection.HTTP_UNAUTHORIZED);
return;
}
String username = policy.getUserName();
String password = policy.getPassword();
// CHECK USERNAME AND PASSWORD
if (!checkLogin(username, password)) {
System.out.println("handleMessage: Invalid username or password for user: "
+ policy.getUserName());
sendErrorResponse(message, HttpURLConnection.HTTP_FORBIDDEN);
}
}
private boolean checkLogin(String username, String password) {
if (username.equals("admin") && password.equals("admin")) {
return true;
}
return false;
}
private void sendErrorResponse(Message message, int responseCode) {
Message outMessage = getOutMessage(message);
outMessage.put(Message.RESPONSE_CODE, responseCode);
// Set the response headers
@SuppressWarnings("unchecked")
Map<String, List<String>> responseHeaders = (Map<String, List<String>>) message
.get(Message.PROTOCOL_HEADERS);
if (responseHeaders != null) {
responseHeaders.put("WWW-Authenticate", Arrays.asList(new String[] { "Basic realm=realm" }));
responseHeaders.put("Content-Length", Arrays.asList(new String[] { "0" }));
}
message.getInterceptorChain().abort();
try {
getConduit(message).prepare(outMessage);
close(outMessage);
} catch (IOException e) {
e.printStackTrace();
}
}
private Message getOutMessage(Message inMessage) {
Exchange exchange = inMessage.getExchange();
Message outMessage = exchange.getOutMessage();
if (outMessage == null) {
Endpoint endpoint = exchange.get(Endpoint.class);
outMessage = endpoint.getBinding().createMessage();
exchange.setOutMessage(outMessage);
}
outMessage.putAll(inMessage);
return outMessage;
}
private Conduit getConduit(Message inMessage) throws IOException {
Exchange exchange = inMessage.getExchange();
EndpointReferenceType target = exchange.get(EndpointReferenceType.class);
Conduit conduit = exchange.getDestination().getBackChannel(inMessage, null, target);
exchange.setConduit(conduit);
return conduit;
}
private void close(Message outMessage) throws IOException {
OutputStream os = outMessage.getContent(OutputStream.class);
os.flush();
os.close();
}
}
I've been fighting with this for a few days now. I don't know what to google any more. Help is appreciated.
I've found the solution. I was missing the following line in the MANIFEST.MF file in the war project:
Dependencies: org.apache.cxf
Maven wasn't including this line by itself, so I had to find a workaround. I found out about that here. It says: When using annotations on your endpoints/handlers such as the Apache CXF ones (@InInterceptor, @GZIP, ...) remember to add the proper module dependency in your manifest. Otherwise your annotations are not picked up and added to the annotation index by JBoss Application Server 7, resulting in them being completely and silently ignored.
This is where I found out how to change the MANIFEST.MF file.
In short, I added a custom manifest file to my project and referenced it in the pom.xml. Hope this helps someone.
The answer provided by Felix is accurate. I managed to solve the problem using his instructions. Just for completeness, here is the Maven config that lets you use your own MANIFEST.MF file placed in the META-INF folder.
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-war-plugin</artifactId>
<configuration>
<archive>
<manifestFile>src/main/resources/META-INF/MANIFEST.MF</manifestFile>
</archive>
</configuration>
</plugin>
and here is the relevant content of the MANIFEST.MF file I was using:
Manifest-Version: 1.0
Description: yourdescription
Dependencies: org.apache.ws.security,org.apache.cxf
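If you would rather not maintain a separate MANIFEST.MF file, the same header can also be injected straight from the pom.xml through the archiver's manifestEntries; treat this as an untested sketch of that option:
<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-war-plugin</artifactId>
  <configuration>
    <archive>
      <manifestEntries>
        <!-- JBoss AS 7 module dependencies required for the CXF annotations -->
        <Dependencies>org.apache.cxf</Dependencies>
      </manifestEntries>
    </archive>
  </configuration>
</plugin>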

ECMASCRIPT 5 with wro4j and Google Closure Compiler

We are using wro4j with Google Closure and Maven to minify our JS. By default it does not support strict mode in the JS ("use strict";), it just strips it out. Is there any configuration I can do in the pom.xml or somewhere else to get it to leave "use strict" in there?
This is the flag for the Google Closure Compiler to do it:
--language_in=ECMASCRIPT5_STRICT
Not sure how to plug that into wro4j. Any ideas?
Create a custom implementation of the manager factory which adds ECMAScript5:
public class MyCustomWroManagerFactory
extends DefaultStandaloneContextAwareManagerFactory
{
@Override
protected ProcessorsFactory newProcessorsFactory()
{
final SimpleProcessorsFactory factory = new SimpleProcessorsFactory();
factory.addPreProcessor(
new GoogleClosureCompressorProcessor(
CompilerOptions.LanguageMode.ECMASCRIPT5_STRICT
)
);
return factory;
}
}
Reference it in the pom.xml as the value of the wroManagerFactory node:
<configuration>
<wroManagerFactory>com.mycompany.MyCustomWroManagerFactory</wroManagerFactory>
</configuration>
According to John Lenz from the Closure Compiler project, if you are using the Compiler API directly, you should specify a CodingConvention.
References
GoogleClosureCompressorProcessor.java - setCompilerOptions method
GoogleClosureCompressorProcessor.java - optionsPool method
Closure Compiler Service API Reference - language | Closure Compiler | Google Developers
It's a bit more complicated in wro4j-maven-plugin 1.8, but not that bad.
You need to add two Java classes. First override newCompilerOptions of GoogleClosureCompressorProcessor like so:
package com.example.package.wro;
import com.google.javascript.jscomp.CheckLevel;
import com.google.javascript.jscomp.ClosureCodingConvention;
import com.google.javascript.jscomp.CompilerOptions;
import com.google.javascript.jscomp.DiagnosticGroups;
import java.nio.charset.Charset;
import org.apache.commons.lang3.CharEncoding;
import ro.isdc.wro.extensions.processor.js.GoogleClosureCompressorProcessor;
/**
* Custom processor overriding `newCompilerOptions` to add custom compiler options.
*
* Original author: Alex Objelean.
*/
public class CustomGoogleClosureCompressorProcessor extends GoogleClosureCompressorProcessor {
/**
* Encoding to use.
*/
public static final String ENCODING = CharEncoding.UTF_8;
@Override
protected CompilerOptions newCompilerOptions() {
final CompilerOptions options = new CompilerOptions();
// Set the language_in option on the Google Closure Compiler to prevent errors like:
// "JSC_TRAILING_COMMA. Parse error. IE8 (and below)"
options.setLanguageIn(CompilerOptions.LanguageMode.ECMASCRIPT5);
/**
* According to John Lenz from the Closure Compiler project, if you are using the Compiler API directly, you should
* specify a CodingConvention. {@link http://code.google.com/p/wro4j/issues/detail?id=155}
*/
options.setCodingConvention(new ClosureCodingConvention());
// use the wro4j encoding by default
//options.setOutputCharset(Charset.forName(getEncoding()));
setEncoding(ENCODING);
options.setOutputCharset(Charset.forName(ENCODING));
// set it to warning, otherwise compiler will fail
options.setWarningLevel(DiagnosticGroups.CHECK_VARIABLES, CheckLevel.WARNING);
return options;
}
}
You'll notice I've commented out the line calling getEncoding. This is because it's private. I also added setEncoding just in case.
Then we need the custom manager:
package com.example.package.wro;
import ro.isdc.wro.manager.factory.standalone.DefaultStandaloneContextAwareManagerFactory;
import ro.isdc.wro.model.resource.processor.factory.ProcessorsFactory;
import ro.isdc.wro.model.resource.processor.factory.SimpleProcessorsFactory;
/**
* Custom manager adding the custom processor.
*/
public class CustomWroManagerFactory extends DefaultStandaloneContextAwareManagerFactory {
@Override
protected ProcessorsFactory newProcessorsFactory() {
final SimpleProcessorsFactory factory = new SimpleProcessorsFactory();
factory.addPreProcessor(
new CustomGoogleClosureCompressorProcessor()
);
return factory;
}
}
And then use it in your pom.xml as the wroManagerFactory, something like this:
<plugin>
<groupId>ro.isdc.wro4j</groupId>
<artifactId>wro4j-maven-plugin</artifactId>
<version>1.8.0</version>
<executions>
<execution>
<phase>compile</phase>
<goals>
<goal>run</goal>
</goals>
</execution>
</executions>
<!-- Google Closure Compiler -->
<!-- http://www.gzfs020.com/using-google-closure-compiler-with-wro4j-maven-plugin.html -->
<configuration>
<contextFolder>${basedir}/src/main</contextFolder>
<wroFile>${basedir}/src/main/config/wro.xml</wroFile>
<destinationFolder>${project.build.directory}/${project.build.finalName}/min</destinationFolder>
<!--
<wroManagerFactory>ro.isdc.wro.extensions.manager.standalone.GoogleStandaloneManagerFactory</wroManagerFactory>
-->
<wroManagerFactory>com.example.package.wro.CustomWroManagerFactory</wroManagerFactory>
</configuration>
</plugin>
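The wroFile referenced above is the standard wro4j model descriptor. In case it helps, a minimal wro.xml for such a setup might look like the sketch below; the group name and resource path are assumptions, not taken from the original question:
<groups xmlns="http://www.isdc.ro/wro">
  <group name="all">
    <!-- every JS file under /js, run through the custom Closure pre-processor -->
    <js>/js/**.js</js>
  </group>
</groups>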
