FileNet ContentElement CheckOut - filenet-p8

I have a code module that has supporting jars as content elements. Whenever I change the Java code and want to upload the new jar, I check the module out and check it back in. But in this process, when I check out, all the supporting jars also have to be re-added manually. Is there a way to check out just the jar I want to update, leaving the supporting jars behind?

I wrote some simple code to achieve this. Hope it helps others:
import java.io.File;
import java.io.FileInputStream;
import java.util.LinkedHashMap;
import java.util.Map;

import javax.security.auth.Subject;

import com.filenet.api.admin.CodeModule;
import com.filenet.api.collection.ContentElementList;
import com.filenet.api.constants.AutoClassify;
import com.filenet.api.constants.CheckinType;
import com.filenet.api.constants.RefreshMode;
import com.filenet.api.constants.ReservationType;
import com.filenet.api.core.Connection;
import com.filenet.api.core.ContentTransfer;
import com.filenet.api.core.Domain;
import com.filenet.api.core.Factory;
import com.filenet.api.core.ObjectStore;
import com.filenet.api.util.UserContext;

public class UpdateCodeModule {

    public static void main(String[] args) {
        CodeModule module = null;
        boolean checkedIn = false;
        try {
            // Connect and authenticate against the CE web service.
            Connection conn = Factory.Connection.getConnection("http://server:port/wsi/FNCEWS40MTOM");
            Subject subject = UserContext.createSubject(conn, "user_id", "password", "FileNetP8WSI");
            UserContext uc = UserContext.get();
            uc.pushSubject(subject);

            Domain domain = Factory.Domain.fetchInstance(conn, "domain_name", null);
            ObjectStore os = Factory.ObjectStore.fetchInstance(domain, "objectstore_name", null);

            // Check out the code module and grab its reservation object.
            module = Factory.CodeModule.getInstance(os, "/CodeModules/codemodulename");
            module.clearPendingActions();
            module.checkout(ReservationType.OBJECT_STORE_DEFAULT, null, "CodeModule", null);
            module.save(RefreshMode.REFRESH);
            CodeModule reservation = (CodeModule) module.get_Reservation();

            // Collect the local jars, putting the main code module jar first
            // so that it becomes content element 0.
            File[] jars = new File("path to jars").listFiles();
            Map<String, File> orderedJars = new LinkedHashMap<String, File>();
            for (File file : jars) {
                if (file.getName().equalsIgnoreCase("codemodule.jar")) {
                    orderedJars.put(file.getName(), file);
                }
            }
            for (File file : jars) {
                if (!file.getName().equalsIgnoreCase("codemodule.jar")) {
                    orderedJars.put(file.getName(), file);
                }
            }

            // Build the new content element list from the local files.
            ContentElementList elements = Factory.ContentElement.createList();
            for (Map.Entry<String, File> entry : orderedJars.entrySet()) {
                ContentTransfer ct = Factory.ContentTransfer.createInstance();
                ct.setCaptureSource(new FileInputStream(entry.getValue()));
                ct.set_RetrievalName(entry.getKey());
                elements.add(ct);
            }

            // Check in the reservation as a new major version and promote it.
            reservation.set_ContentElements(elements);
            reservation.checkin(AutoClassify.DO_NOT_AUTO_CLASSIFY, CheckinType.MAJOR_VERSION);
            reservation.save(RefreshMode.REFRESH);
            reservation.promoteVersion();
            checkedIn = true;
        } catch (Exception e) {
            e.printStackTrace();
        } finally {
            // Only cancel the checkout if the checkin did not complete.
            if (module != null && !checkedIn) {
                module.cancelCheckout();
            }
        }
    }
}
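If keeping local copies of every supporting jar is the annoyance, a variation is to rebuild the element list from the released version itself and substitute only the jar that changed. This is an untested sketch, not from the original answer: it assumes the stored elements are ContentTransfer objects (not ContentReference), re-captures their bytes via accessContentStream(), and uses "path to new codemodule.jar" as a placeholder. It would sit inside the same try block, between the checkout and the checkin:

// Untested sketch: copy the supporting jars from the released version and
// replace only codemodule.jar; "path to new codemodule.jar" is a placeholder.
CodeModule released = (CodeModule) module.get_CurrentVersion();
ContentElementList newElements = Factory.ContentElement.createList();
java.util.Iterator<?> it = released.get_ContentElements().iterator();
while (it.hasNext()) {
    ContentTransfer existing = (ContentTransfer) it.next();
    ContentTransfer copy = Factory.ContentTransfer.createInstance();
    if (existing.get_RetrievalName().equalsIgnoreCase("codemodule.jar")) {
        // Substitute the freshly built jar from disk.
        copy.setCaptureSource(new FileInputStream(new File("path to new codemodule.jar")));
    } else {
        // Re-capture the stored bytes of the untouched supporting jar.
        copy.setCaptureSource(existing.accessContentStream());
    }
    copy.set_RetrievalName(existing.get_RetrievalName());
    newElements.add(copy);
}
reservation.set_ContentElements(newElements);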

Related

Issue with Arrays.stream(body.split(" ")).forEach

Trying to download files with Apache Camel.
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Arrays;

import org.apache.camel.builder.RouteBuilder;
import org.springframework.stereotype.Component;

@Component
public class Download extends RouteBuilder {

    @Override
    public void configure() throws Exception {
        from("file:files/input")
            .routeId("downloadInitId")
            .to("direct:downloadFiles");

        from("direct:downloadFiles")
            .routeId("downloadProcessId")
            .process(p -> {
                String body = p.getIn().getBody(String.class);
                Arrays.stream(body.split(" ")).forEach(s -> {
                    try {
                        URL url = new URL(s);
                        File f = new File(s);
                        System.out.println(f.getName());
                        System.out.println(f.getPath());
                        InputStream in = url.openStream();
                        Files.copy(in, Paths.get(".\\files\\download\\output\\" + f.getName()),
                                StandardCopyOption.REPLACE_EXISTING);
                        in.close();
                    } catch (IOException e) {
                        System.err.println(e.getMessage());
                    }
                });
            });
    }
}
I have a files.txt with the content below, which I drop into the files\input folder:
https://github.com/a.zip
https://github.com/b.zip
The output is always like this:
b.zip
https:\github.com\a.zip
https:\github.com\b.zip
I'm not sure why a.zip is always omitted, so the first file never gets downloaded.
With three lines, only the last filename is printed and downloaded.
Got it. The whole file arrives as one message body, so splitting on spaces leaves the line terminators embedded in the tokens (e.g. "a.zip\nhttps://..." becomes a single malformed URL). Splitting the body into one exchange per line, and stripping any remaining line terminators, fixes it:
from("direct:downloadFiles")
.routeId("downloadProcessId")
.split(body().tokenize("\n")) // add this
.process(p -> {
String body = p.getIn().getBody(String.class);
Arrays.stream(body.split(" ")).forEach(s -> {
String sn = s.toString(); // add this
sn = sn.replace("\n", "").replace("\r", ""); // add this
try {
URL url = new URL(sn); // use sn
File f = new File(sn); // use sn
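For completeness, a compact version of the corrected route might look like this. It is an untested sketch, not the answerer's exact code: it assumes the output directory exists, derives the filename from the URL instead of a File object, and skips blank lines.

from("direct:downloadFiles")
    .routeId("downloadProcessId")
    .split(body().tokenize("\n")) // one exchange per line of files.txt
    .process(p -> {
        // Each body is now a single URL, possibly with a trailing \r.
        String line = p.getIn().getBody(String.class).trim();
        if (line.isEmpty()) {
            return;
        }
        String name = line.substring(line.lastIndexOf('/') + 1);
        try (InputStream in = new URL(line).openStream()) {
            Files.copy(in, Paths.get("files/download/output/" + name),
                    StandardCopyOption.REPLACE_EXISTING);
        }
    });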

Call BigQuery stored procedure (Routine) using Spring Boot

I'm trying to call a Google BigQuery stored procedure (routine) using Spring Boot. I tried all the methods of the Routines API to extract data, but it didn't help.
Has anyone ever created and called a BigQuery stored procedure (routine) through Spring Boot? If so, how?
public static Boolean executeInsertQuery(String query, TableId tableId, String jobName) {
    log.info("Starting {} truncate query", jobName);
    BigQuery bigquery = GCPConfig.getBigQuery(); // bqClient
    // query configuration
    QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query)
            .setUseLegacySql(false)
            .setAllowLargeResults(true)
            .setDestinationTable(tableId)
            .setWriteDisposition(JobInfo.WriteDisposition.WRITE_TRUNCATE)
            .build();
    try {
        // build and execute the query job.
        QueryJob queryJob = new QueryJob.Builder(queryConfig).bigQuery(bigquery).jobName(jobName).build();
        QueryJob.Result result = queryJob.execute();
    } catch (JobException e) {
        log.error("{} unsuccessful. job id: {}, job name: {}. exception: {}", jobName, e.getJobId(),
                e.getJobName(), e.toString());
        return false;
    }
    return true;
}
package ops.google.com;

import java.io.File;
import java.io.FileInputStream;

import com.google.auth.oauth2.GoogleCredentials;
import com.google.auth.oauth2.ServiceAccountCredentials;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableResult;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

public class SelectFromBigQueryFunction {

    private static final Logger logger = LogManager.getLogger(SelectFromBigQueryFunction.class);

    public boolean tableSelectFromJoin(String key_path) {
        String projectID = "ProjectID";
        boolean status = false;
        try {
            // Call a BQ function/routine, function name -> bq_function_name:
            // String query = "SELECT DataSetName.bq_function_name(1, 1)";
            // Call a BQ stored procedure, procedure name -> bq_stored_procedure_name:
            String query = "CALL DataSetName.bq_stored_procedure_name()";

            // Load service-account credentials from the key file.
            File credentialsPath = new File(key_path);
            FileInputStream serviceAccountStream = new FileInputStream(credentialsPath);
            GoogleCredentials credentials = ServiceAccountCredentials.fromStream(serviceAccountStream);

            // Initialize the client that will be used to send requests.
            BigQuery bigquery = BigQueryOptions.newBuilder()
                    .setProjectId(projectID)
                    .setCredentials(credentials)
                    .build().getService();

            QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
            TableResult results = bigquery.query(queryConfig);
            results.iterateAll().forEach(row -> row.forEach(val -> System.out.printf("%s,", val.toString())));
            logger.info("Query performed successfully.");
            status = true;
        } catch (BigQueryException | InterruptedException e) {
            logger.error("Query not performed \n" + e.toString());
        } catch (Exception e) {
            logger.error("Some Exception \n" + e.toString());
        }
        return status;
    }
}
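Calling this from a Spring Boot application could then be as simple as the following sketch (the key path is a placeholder):

// Hypothetical usage, e.g. from a CommandLineRunner; the key path is a placeholder.
SelectFromBigQueryFunction fn = new SelectFromBigQueryFunction();
boolean ok = fn.tableSelectFromJoin("/path/to/service-account-key.json");
System.out.println("Stored procedure executed: " + ok);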

BDD JBehave stories result in Pending when executed

Recently I started working on BDD using JBehave.
So far, if I run with Maven, the project builds successfully. Execution gets into the story file, but then it does not proceed further.
I tried running with JUnit, but I get the same result.
I think my problem is with the executor file.
I searched many sites, even jbehave.org and many Stack Overflow questions, but in vain.
I spent a lot of time on this but couldn't find the solution. Help me out, and let me know if you need any additional information.
Here is my runner file:
package runnerFile;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.jbehave.core.configuration.Configuration;
import org.jbehave.core.configuration.MostUsefulConfiguration;
import org.jbehave.core.io.CodeLocations;
import org.jbehave.core.io.LoadFromClasspath;
import org.jbehave.core.io.StoryFinder;
import org.jbehave.core.junit.JUnitStories;
import org.jbehave.core.junit.JUnitStory;
import org.jbehave.core.reporters.Format;
import org.jbehave.core.reporters.StoryReporterBuilder;
import org.jbehave.core.steps.InjectableStepsFactory;
import org.jbehave.core.steps.InstanceStepsFactory;
import org.jbehave.core.steps.ScanningStepsFactory;
import org.jbehave.core.steps.Steps;

public class TestRunner extends JUnitStories {

    @Override
    public Configuration configuration() {
        return new MostUsefulConfiguration()
                .useStoryLoader(new LoadFromClasspath(this.getClass().getClassLoader()))
                .useStoryReporterBuilder(new StoryReporterBuilder()
                        .withDefaultFormats()
                        .withFormats(Format.HTML, Format.CONSOLE)
                        .withRelativeDirectory("jbehave-report"));
    }

    @Override
    public InjectableStepsFactory stepsFactory() {
        // ArrayList<Object> stepFileList = new ArrayList<Object>();
        ArrayList<Steps> stepFileList = new ArrayList<Steps>();
        stepFileList.add(new Steps(configuration()));
        return new InstanceStepsFactory(configuration(), stepFileList);
        // return new ScanningStepsFactory(configuration(), "org.jbehave.examples.core.steps",
        //         "my.other.steps").matchingNames(".*Steps").notMatchingNames(".*SkipSteps");
    }

    @Override
    protected List<String> storyPaths() {
        return new StoryFinder().findPaths(
                CodeLocations.codeLocationFromClass(this.getClass()),
                Arrays.asList("**/TC_2.story"),
                Arrays.asList(""));
    }
}
I kept my story file inside src/test/resources and the step definitions inside src/test/java.
**story** (src/test/resources):
Narrative:
In order to communicate effectively to the business some functionality
As a development team
I want to use Behaviour-Driven Development
Scenario: A scenario is a collection of executable steps of different type
Given I launch the url
When I login with username <Username> and password <Password>
Then I should see the homepage
Examples:
|Username|Password|
|test@gmail.com|test1234|
**step definition** (src/test/java):
package definition;

import org.jbehave.core.annotations.Given;
import org.jbehave.core.annotations.Named;
import org.jbehave.core.annotations.Then;
import org.jbehave.core.annotations.When;

import pages.Homepage_Pages;

public class HomePage {

    Homepage_Pages home;

    @Given("I launch the url")
    public void url() {
        home.launchUrl();
    }

    @When("I login with username <Username> and password <Password>")
    public void login(@Named("Username") String Username, @Named("Password") String Password) {
        System.out.println(Username);
    }

    @Then("I should see the homepage")
    public void homePageVerification() {
        System.out.println("Heello");
    }
}
Maven Console:
Try the following code: a stripped-down, simple test runner that does nothing fancy, but simply runs all stories found in sub-folders of the main folder and includes all step classes found in the defined steps-file location. My original had a lot of these things hard-coded, but I changed them to final Strings, so it should be easy enough to adapt to your situation and run with this file. Obviously, replace "com.yourpackage.steps" with whatever package you place your steps files in. Hope this helps.
package testrunner;

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.jbehave.core.configuration.Configuration;
import org.jbehave.core.configuration.MostUsefulConfiguration;
import org.jbehave.core.embedder.EmbedderControls;
import org.jbehave.core.io.CodeLocations;
import org.jbehave.core.io.StoryFinder;
import org.jbehave.core.junit.JUnitStories;
import org.jbehave.core.reporters.CrossReference;
import org.jbehave.core.reporters.Format;
import org.jbehave.core.reporters.StoryReporterBuilder;
import org.jbehave.core.steps.InjectableStepsFactory;
import org.jbehave.core.steps.InstanceStepsFactory;
import org.junit.runner.RunWith;

import de.codecentric.jbehave.junit.monitoring.JUnitReportingRunner;

@RunWith(JUnitReportingRunner.class)
public class TestRunner extends JUnitStories {

    private Configuration configuration;

    public TestRunner() {
        super();
        CrossReference crossReference = new CrossReference();
        configuration = new MostUsefulConfiguration();
        configuration.useStoryReporterBuilder(
                new StoryReporterBuilder().withFormats(Format.HTML, Format.STATS, Format.CONSOLE)
                        .withCodeLocation(CodeLocations.codeLocationFromPath("target/."))
                        .withCrossReference(crossReference));
        EmbedderControls embedderControls = configuredEmbedder().embedderControls();
        embedderControls.doBatch(false);
        embedderControls.doGenerateViewAfterStories(true);
        embedderControls.doSkip(false);
        embedderControls.doVerboseFailures(true);
        embedderControls.doVerboseFiltering(true);
        embedderControls.useThreads(1);
        embedderControls.useStoryTimeouts("1800");
    }

    @Override
    protected List<String> storyPaths() {
        return new StoryFinder().findPaths(CodeLocations.codeLocationFromClass(this.getClass()),
                "**/*.story", "");
    }

    @Override
    public Configuration configuration() {
        return configuration;
    }

    @Override
    public InjectableStepsFactory stepsFactory() {
        final String stepsPackage = "com.yourpackage.steps";
        final String stepsLoc = "src/test/java/" + stepsPackage.replace(".", "/");
        List<Object> stepList = new ArrayList<Object>();
        File steps = new File(stepsLoc);
        File[] fileList = steps.listFiles();
        int size = fileList.length;
        for (int i = 0; i < size; i++) {
            if (fileList[i].isFile()) { // listFiles() also returns directories
                String value = fileList[i].getName().replace(".java", ""); // strip extension
                if (!value.toLowerCase().contains("testrunner")) { // ignore the testrunner itself
                    try {
                        Object stepObject = Class.forName(stepsPackage + "." + value).newInstance();
                        stepList.add(stepObject);
                    } catch (InstantiationException e) {
                        e.printStackTrace();
                    } catch (IllegalAccessException e) {
                        e.printStackTrace();
                    } catch (ClassNotFoundException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        return new InstanceStepsFactory(configuration(), stepList);
    }
}
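As a footnote on why the original runner stays Pending: it most likely comes from registering the generic placeholder new Steps(configuration()), which contains no annotated step methods, so nothing ever matches the story steps. With the layout from the question, the minimal change (a sketch, not the answerer's code) would be to hand the real step class to the factory:

@Override
public InjectableStepsFactory stepsFactory() {
    // Register the actual step class (package "definition" in the question)
    // instead of an empty Steps placeholder.
    return new InstanceStepsFactory(configuration(), new definition.HomePage());
}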

I update the avatar image and display it, but the avatar does not change in Spring Boot. Why?

I use Spring Boot to develop a website. I built a feature for uploading an avatar image, but when I upload a new avatar and display it, the avatar does not change, even though the image has already changed in the folder. Why?
Is the problem the HTTP cache?
My simple project:
dropbox link to my project
(note: you need to change the local path in TestingController)
You are not able to see the uploaded picture instantly because you save the image among the static files of your application. You can see the saved image in your folder, but not when you call your URL. If you refresh your running project after uploading the file (project root and hit F5), you should be able to see it.
The better solution is to have a @RequestMapping that loads the picture and serves it to the browser.
Building on your project, try it like this:
import java.io.IOException;
import java.net.MalformedURLException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

import org.json.JSONObject;
import org.springframework.core.io.Resource;
import org.springframework.core.io.UrlResource;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.multipart.MultipartFile;

@Controller
public class TestingController {

    private final Path p = Paths.get("/Users/devin/Documents/workspace-sts-3.8.1.RELEASE/testing/src/main/resources/static/uploads/images");

    @RequestMapping(value = { "/", "/home" })
    public String home() {
        return "home";
    }

    @RequestMapping(value = "/post_upload_avatar_file", method = RequestMethod.POST)
    @ResponseBody
    public Object uploadAvatarFile(@RequestParam("uploadfile") MultipartFile uploadfile) {
        JSONObject resJsonData = new JSONObject();
        try {
            if (uploadfile.isEmpty()) {
                System.out.println("Empty");
            }
            Files.copy(uploadfile.getInputStream(), p.resolve(uploadfile.getOriginalFilename()));
            resJsonData.put("status", 200);
            resJsonData.put("message", "Success!");
            resJsonData.put("data", uploadfile.getOriginalFilename());
        } catch (Exception e) {
            System.out.println(e.getMessage());
            resJsonData.put("status", 400);
            resJsonData.put("message", "Upload Image Error!");
            resJsonData.put("data", "");
        }
        return resJsonData.toString();
    }

    @GetMapping("files/{filename:.+}")
    @ResponseBody
    public ResponseEntity<Resource> serverFile(@PathVariable String filename) {
        Resource file = loadAsResource(filename);
        return ResponseEntity
                .ok()
                .body(file);
    }

    public Resource loadAsResource(String filename) {
        try {
            Path file = p.resolve(filename);
            Resource resource = new UrlResource(file.toUri());
            if (resource.exists() || resource.isReadable()) {
                return resource;
            } else {
                System.out.println("no file");
            }
        } catch (MalformedURLException e) {
            System.out.println(e);
        }
        return null;
    }

    public Stream<Path> loadAll() {
        try {
            return Files.walk(p, 1)
                    .filter(path -> !path.equals(p))
                    .map(path -> p.relativize(path));
        } catch (IOException e) {
            System.out.println(e);
        }
        return null;
    }
}
In this code I don't implement exception and error handling.
You can upload your picture as before, and then call another URL to receive the picture in your browser:
http://localhost:8080/files/testing.png
And the picture should be the response.
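On the HTTP-cache suspicion from the question: if the browser still shows a stale avatar when serving through a mapping like the one above, you can tell the browser not to cache the response. This is an addition, not part of the original answer; a minimal sketch of the serving method with caching disabled:

import org.springframework.http.CacheControl;

// Sketch: disable browser caching for the served image so a re-uploaded
// avatar shows up immediately on the next request.
@GetMapping("files/{filename:.+}")
@ResponseBody
public ResponseEntity<Resource> serverFile(@PathVariable String filename) {
    Resource file = loadAsResource(filename);
    return ResponseEntity
            .ok()
            .cacheControl(CacheControl.noCache())
            .body(file);
}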
Please have a look at these two sources for a complete solution:
Spring file upload - backend
Spring file upload - frontend

How to use Hadoop FS API inside Storm Bolt in java

I want to store data emitted by a Storm spout in HDFS. I have added Hadoop FS API code in the bolt class, but it throws an error with Storm.
Following is the Storm bolt Class :
package bolts;

import java.io.*;
import java.util.*;
import java.net.*;

import org.apache.hadoop.fs.*;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;

import backtype.storm.topology.BasicOutputCollector;
import backtype.storm.topology.OutputFieldsDeclarer;
import backtype.storm.topology.base.BaseBasicBolt;
import backtype.storm.tuple.Fields;
import backtype.storm.tuple.Tuple;
import backtype.storm.tuple.Values;

public class DataNormalizer extends BaseBasicBolt {

    public void execute(Tuple input, BasicOutputCollector collector) {
        String sentence = input.getString(0);
        String[] process = sentence.split(" ");
        try {
            String source = "/root/data/top_output.csv";
            String dest = "hdfs://localhost:9000/user/root/nishu/top_output/top_output_1.csv";
            Configuration conf = new Configuration();
            FileSystem fileSystem = FileSystem.get(URI.create(dest), conf);
            System.out.println(fileSystem);
            Path srcPath = new Path(source);
            Path dstPath = new Path(dest);
            try {
                if (!fileSystem.exists(dstPath)) {
                    // Destination does not exist yet: stream the local file up.
                    FSDataOutputStream out = fileSystem.create(dstPath, true);
                    InputStream in = new BufferedInputStream(
                            new FileInputStream(new File(source)));
                    byte[] b = new byte[1024];
                    int numBytes = 0;
                    while ((numBytes = in.read(b)) > 0) {
                        out.write(b, 0, numBytes);
                    }
                    in.close();
                    out.close();
                } else {
                    fileSystem.copyFromLocalFile(srcPath, dstPath);
                }
            } catch (Exception e) {
                System.err.println("Exception caught! :" + e);
                System.exit(1);
            } finally {
                fileSystem.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        // This bolt only writes to HDFS and emits no tuples downstream.
    }
}
I have also added the Hadoop jars to the CLASSPATH.
Following is the value of the classpath:
$STORM_HOME/storm-0.8.1.jar:$JAVA_HOME/lib/:$HADOOP_HOME/hadoop-core-1.0.4.jar:$HADOOP_HOME/lib/:$STORM_HOME/lib/
I also copied the Hadoop libraries hadoop-core-1.0.4.jar, commons-collections-3.2.1.jar and commons-cli-1.2.jar into the Storm/lib directory.
When I run this topology, it throws the following error:
3006 [Thread-16] ERROR backtype.storm.daemon.executor -
java.lang.NoClassDefFoundError: org/apache/commons/configuration/Configuration
at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<init>(DefaultMetricsSystem.java:37)
at org.apache.hadoop.metrics2.lib.DefaultMetricsSystem.<clinit>(DefaultMetricsSystem.java:34)
at org.apache.hadoop.security.UgiInstrumentation.create(UgiInstrumentation.java:51)
at org.apache.hadoop.security.UserGroupInformation.initialize(UserGroupInformation.java:216)
at org.apache.hadoop.security.UserGroupInformation.ensureInitialized(UserGroupInformation.java:184)
at org.apache.hadoop.security.UserGroupInformation.isSecurityEnabled(UserGroupInformation.java:236)
at org.apache.hadoop.security.UserGroupInformation.getLoginUser(UserGroupInformation.java:466)
at org.apache.hadoop.security.UserGroupInformation.getCurrentUser(UserGroupInformation.java:452)
at org.apache.hadoop.fs.FileSystem$Cache$Key.<init>(FileSystem.java:1494)
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:1395)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:254)
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:123)
at bolts.DataNormalizer.execute(DataNormalizer.java:67)
at backtype.storm.topology.BasicBoltExecutor.execute(BasicBoltExecutor.java:32)
......................
The error message tells you that Apache Commons Configuration is missing. You have to add it to the classpath.
More generally, you should add all Hadoop dependencies to your classpath. You can find them using a dependency manager (Maven, Ivy, Gradle, etc.) or by looking into /usr/lib/hadoop/lib on a machine on which Hadoop is installed.
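For example, with Maven the missing library is the commons-configuration:commons-configuration artifact (version 1.6 is an assumption; it matches what hadoop-core 1.0.4 of that era pulled in), added as a dependency alongside hadoop-core in your pom.xml.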
