public void Deposite() throws Exception
{
    try
    {
        Class.forName("com.mysql.jdbc.Driver");
        String url = "jdbc:mysql://localhost:3306/bank";
        Connection con = DriverManager.getConnection(url,"root","admin");
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        System.out.print("Enter your A/c no. : " );
        acNo = Integer.parseInt(br.readLine());
        String sql = "SELECT Name,Ac_No,Balance FROM CUSTOMER WHERE Ac_No=?";
        PreparedStatement ps = con.prepareStatement(sql);
        ps.setInt(1,acNo);
        ResultSet rs = ps.executeQuery();
        while(rs.next())
        {
            String name = rs.getString("Name");
            int acNo = rs.getInt("Ac_No");
            float bal = rs.getFloat("Balance");
            System.out.println(" "+name+" "+acNo+" "+bal);
        }
        System.out.println("Current Bal : "+bal);
        BufferedReader br1 = new BufferedReader(new InputStreamReader(System.in));
        System.out.print("Enter Deposite Amt : ");
        amt = Float.parseFloat(br1.readLine());
        bal = bal + amt;
        //System.out.println("Current Bal : "+bal);
        String sql1 = "UPDATE CUSTOMER SET Balance = ? WHERE Ac_No =?";
        ps = con.prepareStatement(sql1);
        ps.setInt(1,acNo);
        ps.setFloat(2,bal);
        int i = ps.executeUpdate();
        System.out.println("New Balance updated.... "+i);
        System.out.println("Transaction Successful....");
        ps.close();
        con.close();
    }
    catch(Exception e)
    {
        System.out.println(e);
    }
}
I am not getting the Balance after the while loop, and when I try to update it the console shows a zero value for Balance, while the database still holds the value I entered when creating the account. Please help.
Console output:
MySQL Workbench output:
Example code. Try-with-resources takes care of closing, even when an exception is thrown.
static class Customer {
    int acNo;
    String name;
    BigDecimal bal;
}

public static void main(String[] args) throws IOException {
    BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
    System.out.print("Enter your A/c no. : ");
    int acNo = Integer.parseInt(br.readLine());
    String url = "jdbc:mysql://localhost:3306/bank";
    try (Connection con = DriverManager.getConnection(url, "root", "admin")) {
        Customer customer = loadCustomer(con, acNo);
        if (customer == null) {
            System.out.println("Wrong account");
        } else {
            System.out.printf("Current balance for %s, account %d: %12.2f%n",
                    customer.name, customer.acNo, customer.bal);
        }
    } catch (SQLException e) {
        e.printStackTrace();
    }
}

private static Customer loadCustomer(Connection con, int acNo) throws SQLException {
    String sql = "SELECT Name, Balance FROM CUSTOMER WHERE Ac_No = ?";
    try (PreparedStatement ps = con.prepareStatement(sql)) {
        ps.setInt(1, acNo);
        try (ResultSet rs = ps.executeQuery()) {
            if (rs.next()) {
                Customer customer = new Customer();
                customer.acNo = acNo;
                customer.name = rs.getString(1);
                customer.bal = rs.getBigDecimal(2);
                return customer;
            }
        }
    }
    return null;
}
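To complete the picture for the deposit itself: in the original code the while loop declares local name/acNo/bal variables that shadow the fields, so the balance printed after the loop is never the one read from the database, and the UPDATE binds its parameters in the reverse order of the placeholders (the account number goes into Balance and the balance into Ac_No). A minimal sketch of the update step in the same style, assuming the same CUSTOMER table:

// Sketch only: let the database add the amount, which also avoids rewriting a stale value.
private static void deposit(Connection con, int acNo, BigDecimal amount) throws SQLException {
    String sql = "UPDATE CUSTOMER SET Balance = Balance + ? WHERE Ac_No = ?";
    try (PreparedStatement ps = con.prepareStatement(sql)) {
        ps.setBigDecimal(1, amount); // parameter 1 feeds the Balance expression
        ps.setInt(2, acNo);          // parameter 2 is the account number in the WHERE clause
        int rows = ps.executeUpdate();
        System.out.println(rows == 1 ? "Transaction successful" : "Account not found");
    }
}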
I need to create a REST controller which extracts data from a database and write it into CSV files that will ultimately be zipped together. Each CSV file should contain exactly 10 lines. Eventually all CSV files should be zipped into a one zip file. I want everything to happen on the fly, meaning - saving files to a temporary location on the disk is not an option. Can someone provide me with an example?
I found a very nice piece of code to export a huge number of rows from a database into several CSV files and zip them.
I think this code can assist a lot of developers.
I have tested the solution and you can find the entire example at: https://github.com/idaamit/stream-from-db/tree/master
The controller is:
@GetMapping(value = "/employees/{employeeId}/cars")
@ResponseStatus(HttpStatus.OK)
public ResponseEntity<StreamingResponseBody> getEmployeeCars(@PathVariable int employeeId) {
    log.info("Going to export cars for employee {}", employeeId);
    String zipFileName = "Cars Of Employee - " + employeeId;
    return ResponseEntity.ok()
            .header(HttpHeaders.CONTENT_TYPE, "application/zip")
            .header(HttpHeaders.CONTENT_DISPOSITION, "attachment;filename=" + zipFileName + ".zip")
            .body(employee.getCars(dataSource, employeeId));
}
The Employee class first checks whether we need to prepare more than one CSV or not:
public class Employee {

    public StreamingResponseBody getCars(BasicDataSource dataSource, int employeeId) {
        StreamingResponseBody streamingResponseBody = new StreamingResponseBody() {
            @Override
            public void writeTo(OutputStream outputStream) throws IOException {
                JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource);
                String sqlQuery = "SELECT [Id], [employeeId], [type], [text1] " +
                        "FROM Cars " +
                        "WHERE EmployeeID=? ";
                PreparedStatementSetter preparedStatementSetter = new PreparedStatementSetter() {
                    public void setValues(PreparedStatement preparedStatement) throws SQLException {
                        preparedStatement.setInt(1, employeeId);
                    }
                };
                StreamingZipResultSetExtractor zipExtractor = new StreamingZipResultSetExtractor(
                        outputStream, employeeId, isMoreThanOneFile(jdbcTemplate, employeeId));
                Integer numberOfInteractionsSent = jdbcTemplate.query(sqlQuery, preparedStatementSetter, zipExtractor);
            }
        };
        return streamingResponseBody;
    }

    private boolean isMoreThanOneFile(JdbcTemplate jdbcTemplate, int employeeId) {
        Integer numberOfCars = getCount(jdbcTemplate, employeeId);
        return numberOfCars >= StreamingZipResultSetExtractor.MAX_ROWS_IN_CSV;
    }

    private Integer getCount(JdbcTemplate jdbcTemplate, int employeeId) {
        String sqlQuery = "SELECT count([Id]) " +
                "FROM Cars " +
                "WHERE EmployeeID=? ";
        return jdbcTemplate.queryForObject(sqlQuery, new Object[] { employeeId }, Integer.class);
    }
}
The StreamingZipResultSetExtractor class is responsible for splitting the streamed CSV data into several files and zipping them:
@Slf4j
public class StreamingZipResultSetExtractor implements ResultSetExtractor<Integer> {

    private final static int CHUNK_SIZE = 100000;
    public final static int MAX_ROWS_IN_CSV = 10;
    private OutputStream outputStream;
    private int employeeId;
    private StreamingCsvResultSetExtractor streamingCsvResultSetExtractor;
    private boolean isInteractionCountExceedsLimit;
    private int fileCount = 0;

    public StreamingZipResultSetExtractor(OutputStream outputStream, int employeeId, boolean isInteractionCountExceedsLimit) {
        this.outputStream = outputStream;
        this.employeeId = employeeId;
        this.streamingCsvResultSetExtractor = new StreamingCsvResultSetExtractor(employeeId);
        this.isInteractionCountExceedsLimit = isInteractionCountExceedsLimit;
    }

    @Override
    @SneakyThrows
    public Integer extractData(ResultSet resultSet) throws DataAccessException {
        log.info("Creating thread to extract data as zip file for employeeId {}", employeeId);
        int lineCount = 1; //+1 for header row
        try (PipedOutputStream internalOutputStream = streamingCsvResultSetExtractor.extractData(resultSet);
             PipedInputStream inputStream = new PipedInputStream(internalOutputStream);
             BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(inputStream))) {
            String currentLine;
            String header = bufferedReader.readLine() + "\n";
            try (ZipOutputStream zipOutputStream = new ZipOutputStream(outputStream)) {
                createFile(employeeId, zipOutputStream, header);
                while ((currentLine = bufferedReader.readLine()) != null) {
                    if (lineCount % MAX_ROWS_IN_CSV == 0) {
                        zipOutputStream.closeEntry();
                        createFile(employeeId, zipOutputStream, header);
                        lineCount++;
                    }
                    lineCount++;
                    currentLine += "\n";
                    zipOutputStream.write(currentLine.getBytes());
                    if (lineCount % CHUNK_SIZE == 0) {
                        zipOutputStream.flush();
                    }
                }
            }
        } catch (IOException e) {
            log.error("Task {} could not zip search results", employeeId, e);
        }
        log.info("Finished zipping all lines to {} file\\s - total of {} lines of data for task {}", fileCount, lineCount - fileCount, employeeId);
        return lineCount;
    }

    private void createFile(int employeeId, ZipOutputStream zipOutputStream, String header) {
        String fileName = "Cars for Employee - " + employeeId;
        if (isInteractionCountExceedsLimit) {
            fileCount++;
            fileName += " Part " + fileCount;
        }
        try {
            zipOutputStream.putNextEntry(new ZipEntry(fileName + ".csv"));
            zipOutputStream.write(header.getBytes());
        } catch (IOException e) {
            log.error("Could not create new zip entry for task {} ", employeeId, e);
        }
    }
}
The StreamingCsvResultSetExtractor class is responsible for transferring the data from the ResultSet into the CSV file. There is more work to do to handle special characters that are problematic in CSV cells; a sketch of a fuller escaping rule follows the class below.
@Slf4j
public class StreamingCsvResultSetExtractor implements ResultSetExtractor<PipedOutputStream> {

    private final static int CHUNK_SIZE = 100000;
    private PipedOutputStream pipedOutputStream;
    private final int employeeId;

    public StreamingCsvResultSetExtractor(int employeeId) {
        this.employeeId = employeeId;
    }

    @SneakyThrows
    @Override
    public PipedOutputStream extractData(ResultSet resultSet) throws DataAccessException {
        log.info("Creating thread to extract data as csv and save to file for task {}", employeeId);
        this.pipedOutputStream = new PipedOutputStream();
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.submit(() -> {
            prepareCsv(resultSet);
        });
        return pipedOutputStream;
    }

    @SneakyThrows
    private Integer prepareCsv(ResultSet resultSet) {
        int interactionsSent = 1;
        log.info("starting to extract data to csv lines");
        streamHeaders(resultSet.getMetaData());
        StringBuilder csvRowBuilder = new StringBuilder();
        try {
            int columnCount = resultSet.getMetaData().getColumnCount();
            while (resultSet.next()) {
                for (int i = 1; i < columnCount + 1; i++) {
                    if (resultSet.getString(i) != null && resultSet.getString(i).contains(",")) {
                        String strToAppend = "\"" + resultSet.getString(i) + "\"";
                        csvRowBuilder.append(strToAppend);
                    } else {
                        csvRowBuilder.append(resultSet.getString(i));
                    }
                    csvRowBuilder.append(",");
                }
                int rowLength = csvRowBuilder.length();
                csvRowBuilder.replace(rowLength - 1, rowLength, "\n");
                pipedOutputStream.write(csvRowBuilder.toString().getBytes());
                interactionsSent++;
                csvRowBuilder.setLength(0);
                if (interactionsSent % CHUNK_SIZE == 0) {
                    pipedOutputStream.flush();
                }
            }
        } finally {
            pipedOutputStream.flush();
            pipedOutputStream.close();
        }
        log.debug("Created all csv lines for Task {} - total of {} rows", employeeId, interactionsSent);
        return interactionsSent;
    }

    @SneakyThrows
    private void streamHeaders(ResultSetMetaData resultSetMetaData) {
        StringBuilder headersCsvBuilder = new StringBuilder();
        for (int i = 1; i < resultSetMetaData.getColumnCount() + 1; i++) {
            headersCsvBuilder.append(resultSetMetaData.getColumnLabel(i)).append(",");
        }
        int rowLength = headersCsvBuilder.length();
        headersCsvBuilder.replace(rowLength - 1, rowLength, "\n");
        pipedOutputStream.write(headersCsvBuilder.toString().getBytes());
    }
}
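As noted above, the quoting in prepareCsv only handles commas. A minimal sketch of a fuller escaping rule that could replace it (a helper of my own, not part of the linked project): quote a field when it contains a comma, quote, or line break, and double any embedded quotes.

// Hypothetical helper, not part of the original project: standard CSV escaping.
private static String escapeCsv(String value) {
    if (value == null) {
        return "";
    }
    if (value.contains(",") || value.contains("\"") || value.contains("\n") || value.contains("\r")) {
        // Double embedded quotes and wrap the whole field in quotes.
        return "\"" + value.replace("\"", "\"\"") + "\"";
    }
    return value;
}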
In order to test this, you need to execute http://localhost:8080/stream-demo/employees/3/cars
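For example, a quick way to download the resulting zip from a small test class, assuming the service runs locally on port 8080 (uses java.net.http.HttpClient from Java 11+):

// Sketch only: fetch the zip produced by the endpoint and save it to disk.
public static void main(String[] args) throws IOException, InterruptedException {
    HttpClient client = HttpClient.newHttpClient();
    HttpRequest request = HttpRequest.newBuilder(
            URI.create("http://localhost:8080/stream-demo/employees/3/cars")).build();
    HttpResponse<Path> response = client.send(request,
            HttpResponse.BodyHandlers.ofFile(Path.of("cars-employee-3.zip")));
    System.out.println("Saved " + response.body());
}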
I want to import data from a CSV file to Elasticsearch, but I don't want to use Logstash. What are the ways I can do this?
Any blogs? Docs?
I came across TransportClient, but I'm not sure where to start.
Thanks in advance.
Very late answer, however :) This is for Elasticsearch 7.6.0.
// This class holds the values of each CSV row
public class Document {

    private String id;
    private String documentName;
    private String name;
    private String title;
    private String dob;

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getDocumentName() {
        return documentName;
    }

    public void setDocumentName(String documentName) {
        this.documentName = documentName;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getDob() {
        return dob;
    }

    public void setDob(String dob) {
        this.dob = dob;
    }
}
public void bulkInsert() {
    long starttime = System.currentTimeMillis();
    logger.debug("ElasticSearchServiceImpl => bulkInsert Service Started");
    BufferedReader br = null;
    String line = "";
    String cvsSplitBy = ",";
    BulkRequest request;
    Document document;
    // Elasticsearch index name
    String esIndex = "post";
    try {
        br = new BufferedReader(new FileReader(<path to CSV>));
        request = new BulkRequest();
        while ((line = br.readLine()) != null) {
            // use comma as separator
            String[] row = line.split(cvsSplitBy);
            if (row.length >= 1) {
                // filling the Document object using the csv columns array
                document = getDocEntity(row);
                // adding each filled object into the BulkRequest
                request.add(getIndexRequest(document, esIndex));
            } else {
                logger.info("ElasticSearchServiceImpl => bulkInsert : null row =" + row.toString());
            }
        }
        br.close();
        if (request.numberOfActions() > 0) {
            BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT);
            if (bulkResponse.hasFailures()) {
                logger.error("ElasticSearchServiceImpl => bulkInsert : Some of the records have failed. Please reinitiate the process");
            } else {
                logger.info("ElasticSearchServiceImpl => bulkInsert : Success");
            }
        } else {
            logger.info("ElasticSearchServiceImpl => bulkInsert : No request for BulkInsert =" + request.numberOfActions());
        }
    } catch (Exception e) {
        logger.error("ElasticSearchServiceImpl => bulkInsert : Exception =" + e.getMessage());
    }
    long endTime = System.currentTimeMillis();
    logger.info("ElasticSearchServiceImpl => bulkInsert End" + Util.DB_AVG_RESP_LOG + "" + (endTime - starttime));
}
public static Document getDocEntity(String[] row) throws Exception {
    Document document = new Document();
    document.setId(UUID.randomUUID().toString());
    for (int i = 0; i < row.length; i++) {
        switch (i) {
            case 0:
                document.setDocumentName(row[i]);
                break;
            case 1:
                document.setName(row[i]);
                break;
            case 7:
                document.setTitle(row[i]);
                break;
            case 8:
                document.setDob(row[i]);
                break;
        }
    }
    return document;
}
public static IndexRequest getIndexRequest(Document document, String index) throws Exception {
    IndexRequest indexRequest = null;
    Map<String, Object> jsonMap = new HashMap<>();
    jsonMap.put("doc_name", document.getDocumentName());
    jsonMap.put("title", document.getTitle());
    jsonMap.put("dob", document.getDob());
    indexRequest = new IndexRequest(index).id(document.getId()).source(jsonMap);
    return indexRequest;
}
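The client used in bulkInsert above is the Elasticsearch high-level REST client; a minimal sketch of creating one for a local node (host and port are assumptions):

// Assumed setup: RestHighLevelClient pointing at a local Elasticsearch 7.x node.
RestHighLevelClient client = new RestHighLevelClient(
        RestClient.builder(new HttpHost("localhost", 9200, "http")));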
If you need to inspect each response, you can use the following code:
for (BulkItemResponse bulkItemResponse : bulkResponse) {
    DocWriteResponse itemResponse = bulkItemResponse.getResponse();
    switch (bulkItemResponse.getOpType()) {
        case INDEX:
        case CREATE:
            IndexResponse indexResponse = (IndexResponse) itemResponse;
            break;
    }
}
For more information, please read the official documentation.
I have a list of tasks. When I delete one of the tasks, my list still shows that task, but on the server side the list is updated. I am not able to refresh my list: I get the new task array from the server but cannot show it. When I launch the app again, it shows the updated list. How can I get the updated list without killing the app? In both cases I have the updated array but cannot show it on the view.
public class ModelClass {

    private String message;
    private String statusCode;
    private Response[] res = null;
    protected transient PropertyChangeSupport _propertyChangeSupport = new PropertyChangeSupport(this);

    public ModelClass() {
        super();
        clickEvent(new ActionEvent());
    }

    public static String taskId;

    public void setTaskId(String taskId) {
        System.out.print(taskId);
        this.taskId = taskId;
    }

    public String getTaskId() {
        return taskId;
    }

    public PropertyChangeSupport getPropertyChangeSupport() {
        return _propertyChangeSupport;
    }

    public void setMessage(String message) {
        String oldMessage = this.message;
        this.message = message;
        _propertyChangeSupport.firePropertyChange("message", oldMessage, message);
    }

    public String getMessage() {
        return message;
    }

    public void setStatusCode(String statusCode) {
        String oldStatusCode = this.statusCode;
        this.statusCode = statusCode;
        _propertyChangeSupport.firePropertyChange("statusCode", oldStatusCode, statusCode);
    }

    public String getStatusCode() {
        return statusCode;
    }

    public void setRes(Response[] res) {
        Response[] oldRes = this.res;
        this.res = res;
        System.out.println(res);
        _propertyChangeSupport.firePropertyChange("res", oldRes, res);
        System.out.println("refreshing here ");
    }

    public Response[] getRes() {
        return res;
    }

    @Override
    public String toString() {
        return "ClassPojo [response = " + res + ", message = " + message + ", statusCode = " + statusCode + "]";
    }

    public void addPropertyChangeListener(PropertyChangeListener l) {
        _propertyChangeSupport.addPropertyChangeListener(l);
    }

    public void removePropertyChangeListener(PropertyChangeListener l) {
        _propertyChangeSupport.removePropertyChangeListener(l);
    }

    public class Response {

        private String taskId;
        private String taskType;
        private Integer taskTime;
        private String taskName;
        private PropertyChangeSupport _propertyChangeSupport = new PropertyChangeSupport(this);

        Response(String taskId, String taskType, String taskName) {
            super();
            this.taskId = taskId;
            this.taskType = taskType;
            this.taskName = taskName;
        }

        public void setTaskId(String taskId) {
            String oldTaskId = this.taskId;
            this.taskId = taskId;
            _propertyChangeSupport.firePropertyChange("taskId", oldTaskId, taskId);
        }

        public String getTaskId() {
            return taskId;
        }

        public void setTaskType(String taskType) {
            String oldTaskType = this.taskType;
            this.taskType = taskType;
            _propertyChangeSupport.firePropertyChange("taskType", oldTaskType, taskType);
        }

        public String getTaskType() {
            return taskType;
        }

        public void setTaskTime(Integer taskTime) {
            Integer oldTaskTime = this.taskTime;
            this.taskTime = taskTime;
            _propertyChangeSupport.firePropertyChange("taskTime", oldTaskTime, taskTime);
        }

        public Integer getTaskTime() {
            return taskTime;
        }

        public void setTaskName(String taskName) {
            String oldTaskName = this.taskName;
            this.taskName = taskName;
            _propertyChangeSupport.firePropertyChange("taskName", oldTaskName, taskName);
        }

        public String getTaskName() {
            return taskName;
        }

        @Override
        public String toString() {
            return "ClassPojo [taskId = " + taskId + ", taskType = " + taskType + ", taskTime = " + taskTime +
                    ", taskName = " + taskName + "]";
        }

        public void addPropertyChangeListener(PropertyChangeListener l) {
            _propertyChangeSupport.addPropertyChangeListener(l);
        }

        public void removePropertyChangeListener(PropertyChangeListener l) {
            _propertyChangeSupport.removePropertyChangeListener(l);
        }
    }
    protected transient ProviderChangeSupport providerChangeSupport = new ProviderChangeSupport(this);

    public void addProviderChangeListener(ProviderChangeListener l) {
        providerChangeSupport.addProviderChangeListener(l);
    }

    public void removeProviderChangeListener(ProviderChangeListener l) {
        providerChangeSupport.removeProviderChangeListener(l);
    }

    public void clickEvent(ActionEvent actionEvent) {
        try {
            JSONObject paramsMap = new JSONObject();
            paramsMap.put("userId", "1");
            HttpURLConnection httpURLConnection = null;
            try {
                URL url = new URL("http://ec2-54-226-57-153.compute-1.amazonaws.com:8080/#########");
                httpURLConnection = (HttpURLConnection) url.openConnection();
                httpURLConnection.setReadTimeout(120000);
                httpURLConnection.setConnectTimeout(120000);
                httpURLConnection.setRequestMethod("POST");
                httpURLConnection.setRequestProperty("Content-Type", "application/json");
                httpURLConnection.setDoInput(true);
                httpURLConnection.setDoOutput(true);
                httpURLConnection.connect();
                OutputStream os = httpURLConnection.getOutputStream();
                BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
                os.write(paramsMap.toString().getBytes());
                bufferedWriter.flush();
                bufferedWriter.close();
                os.close();
                if (httpURLConnection.getResponseCode() == HttpURLConnection.HTTP_OK) {
                    InputStream is = httpURLConnection.getInputStream();
                    BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(is, "UTF-8"));
                    StringBuilder builder = new StringBuilder();
                    String line = bufferedReader.readLine();
                    while (line != null) {
                        builder.append(line + "\n");
                        line = bufferedReader.readLine();
                    }
                    is.close();
                    if (httpURLConnection != null)
                        httpURLConnection.disconnect();
                    System.out.println(builder.toString());
                    JSONObject json = new JSONObject(builder.toString());
                    String status = json.optString("statusCode");
                    String message = json.optString("message");
                    String response = json.optString("response");
                    System.out.println(status);
                    System.out.println(message);
                    // System.out.println(response);
                    JSONArray objarr = json.optJSONArray("response");
                    Response[] temp_res = new Response[objarr.length()];
                    for (int i = 0; i < objarr.length(); i++) {
                        System.out.println(objarr.getJSONObject(i));
                        JSONObject obj = objarr.getJSONObject(i);
                        String task = obj.optString("taskName");
                        taskId = obj.optString("taskId");
                        String taskType = obj.optString("taskType");
                        System.out.println(task);
                        System.out.println(taskId);
                        System.out.println(taskType);
                        temp_res[i] = new Response(taskId, taskType, task);
                    }
                    setRes(temp_res);
                } else {
                    if (httpURLConnection != null)
                        httpURLConnection.disconnect();
                    System.out.println("Invalid response from the server");
                }
            } catch (Exception e) {
                e.printStackTrace();
                if (httpURLConnection != null)
                    httpURLConnection.disconnect();
            } finally {
                if (httpURLConnection != null)
                    httpURLConnection.disconnect();
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
I think you need to add providerChangeSupport.fireProviderRefresh("res");
and you have to expose a public method on the bean that makes that providerChangeSupport call.
Here is the link: https://community.oracle.com/message/13203364#13203364
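For illustration, a minimal sketch of such a method on ModelClass (the method name is my own; fireProviderRefresh("res") is the call suggested above):

// Sketch only: a public method the code that reloads tasks can call,
// so the "res" provider is re-read after the server returns the new array.
public void refreshTaskList() {
    providerChangeSupport.fireProviderRefresh("res");
}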
Other than the fix suggested by Rashi Verma, there is one more change required.
The code snippet:
res = (Response[]) res_new.toArray(new Response[res_new.size()]);
setRes(res);
needs to be changed to:
Response[] temp_res;
temp_res = (Response[]) res_new.toArray(new Response[res_new.size()]);
setRes(temp_res);
Currently, because the value of res is changed before invoking setRes, the propertyChangeEvent is not fired inside setRes. Both this propertyChangeEvent and the providerRefresh are needed for the changes you are making to start reflecting on the UI.
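For reference, a minimal illustration of why that matters: java.beans.PropertyChangeSupport skips the notification when the old and new values it receives are equal, so assigning the new array to res before calling setRes(res) means oldRes and res end up being the same reference and the listener never hears about the change.

// Illustration only: no event is fired when the old and new values are equal,
// which is exactly what happens when 'res' is overwritten before setRes runs.
PropertyChangeSupport pcs = new PropertyChangeSupport(new Object());
Object[] sameArray = new Object[0];
pcs.firePropertyChange("res", sameArray, sameArray); // listeners are NOT notified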
I have this method:
public List<Course> getCourses() throws InvalidCourseDataException {
    ArrayList<Course> allCoursesFromDB = new ArrayList<>();
    Connection dbConnection = null;
    String getFromTableSQL = "SELECT * FROM courses";
    try {
        dbConnection = getDBConnection();
        statement = dbConnection.createStatement();
        resultSet = statement.executeQuery(getFromTableSQL);
        while (resultSet.next()) {
            String courseCategory = resultSet.getString("course_category");
            String courseTitle = resultSet.getString("course_title");
            int courseId = resultSet.getInt("course_id");
            Date startDate = resultSet.getTimestamp("starting_date");
            Date endDate = resultSet.getDate("ending_date");
            String description = resultSet.getString("description");
            int teacherId = resultSet.getInt("teacher_id");
            Course course = new Course(courseCategory, courseTitle, startDate, endDate);
            course.setCourseId(courseId);
            course.setTeacherId(teacherId);
            course.setDescription(description);
            addParticipantsIdToCourse(course);
            allCoursesFromDB.add(course);
        }
The getDBConnection() method is:
private static Connection getDBConnection() {
    System.out.println("-------- MySQL JDBC Connection Testing ------------");
    Connection dbConnection = null;
    try {
        Class.forName(DB_DRIVER);
    } catch (ClassNotFoundException e) {
        System.out.println(e.getMessage());
    }
    try {
        dbConnection = DriverManager.getConnection(DB_URL, DB_USER, DB_PASSWORD);
        return dbConnection;
    } catch (SQLException e) {
        System.out.println(e.getMessage());
    }
    return dbConnection;
}
My problem is that resultSet.next() is returning only the first row from the DB. I'm sure that I have multiple rows. I saw the question "JDBC ResultSet is giving only one row although there are many rows in table?", but it doesn't really answer mine :)
I am sorry for the question. I found my mistake. The ResultSet next() method is working fine, but I had changed its value in my addParticipantsIdToCourse(course) method :)
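In other words: statement and resultSet are shared fields, so when the helper runs its own query it replaces the ResultSet the outer loop is iterating. A minimal sketch of the usual fix, keeping the helper's JDBC objects local (the participants table, column names, and the Course getter/adder are assumptions, and the connection is assumed to be reachable from the helper):

// Hypothetical sketch: use local PreparedStatement/ResultSet objects inside the helper
// so the ResultSet being iterated in getCourses() is left untouched.
private void addParticipantsIdToCourse(Course course) throws SQLException {
    String sql = "SELECT participant_id FROM course_participants WHERE course_id = ?";
    try (PreparedStatement ps = dbConnection.prepareStatement(sql)) { // dbConnection assumed available
        ps.setInt(1, course.getCourseId());
        try (ResultSet participants = ps.executeQuery()) {
            while (participants.next()) {
                course.addParticipantId(participants.getInt("participant_id")); // assumed method
            }
        }
    }
}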
When I call this List Items API in Zoho Books:
https://www.zoho.com/invoice/api/v3/settings/items/#list-items, the response is:
{"code":4,"message":"Invalid value passed for JSONString"}
How the API works (from the docs):
HTTP verbs are used to access the resources - GET, POST, PUT and DELETE. All parameters in the request should be form-urlencoded. For all the APIs you need to pass authtoken and organization_id. Input JSON string should be passed using JSONString parameter.
private void testZoho() {
    JSONObject JSONString = new JSONObject();
    JSONString.put("sort_column", "name");
    String urlParameters = "authtoken=xxxxxx&organization_id=xxxxx";
    urlParameters += "&JSONString=" + JSONString.toJSONString();
    System.out.println("retJson1:\n" + urlParameters);
    HttpURLConnection httpcon;
    String url = "https://books.zoho.com/api/v3/items";
    String data = urlParameters;
    String result = null;
    try {
        //Connect
        httpcon = (HttpURLConnection) ((new URL(url).openConnection()));
        httpcon.setDoOutput(true);
        String charset = "UTF-8";
        httpcon.setRequestProperty("Accept-Charset", charset);
        httpcon.setRequestProperty("Content-Type", "application/json;charset=" + charset);
        httpcon.setRequestMethod("GET");
        httpcon.connect();
        //Write
        OutputStream os = httpcon.getOutputStream();
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(os, "UTF-8"));
        writer.write(data);
        writer.close();
        os.close();
        //Read
        BufferedReader br = new BufferedReader(new InputStreamReader(httpcon.getInputStream(), "UTF-8"));
        String line = null;
        StringBuilder sb = new StringBuilder();
        while ((line = br.readLine()) != null) {
            sb.append(line);
        }
        br.close();
        result = sb.toString();
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    System.out.println("retJson1:\n" + result);
}
You need to pass all parameters as URL parameters instead of writing them to the request stream. I have modified your code:
private void testZoho() {
    HttpURLConnection httpcon;
    String url = "https://books.zoho.com/api/v3/items?authtoken=xxxx&organization_id=xxx&sort_column=name";
    String result = null;
    try {
        //Connect
        httpcon = (HttpURLConnection) ((new URL(url).openConnection()));
        httpcon.setDoOutput(true);
        String charset = "UTF-8";
        httpcon.setRequestProperty("Accept-Charset", charset);
        httpcon.setRequestProperty("Content-Type", "application/json;charset=" + charset);
        httpcon.setRequestMethod("GET");
        httpcon.connect();
        //Read
        BufferedReader br = new BufferedReader(new InputStreamReader(httpcon.getInputStream(), "UTF-8"));
        String line = null;
        StringBuilder sb = new StringBuilder();
        while ((line = br.readLine()) != null) {
            sb.append(line);
        }
        br.close();
        result = sb.toString();
    } catch (UnsupportedEncodingException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
    System.out.println("retJson1:\n" + result);
}
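One caveat with building the URL by hand: per the docs quoted above, the parameters should be form-urlencoded, so a JSONString value containing spaces, braces, or quotes has to be encoded before it goes into the query string. A small sketch of doing that with URLEncoder (the token and organization id are placeholders; this would sit inside the try block above, which already catches UnsupportedEncodingException):

// Sketch only: URL-encode each parameter value before appending it to the query string.
JSONObject jsonString = new JSONObject();
jsonString.put("sort_column", "name");
String query = "authtoken=" + URLEncoder.encode("xxxx", "UTF-8")
        + "&organization_id=" + URLEncoder.encode("xxx", "UTF-8")
        + "&JSONString=" + URLEncoder.encode(jsonString.toJSONString(), "UTF-8");
String url = "https://books.zoho.com/api/v3/items?" + query;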