I am trying to read data from an Oracle server that runs in the 'CST' timezone. My Google Dataflow job runs in the 'us-central1' region. I am using the Apache Beam 2.3.0 JdbcIO.read() method to read data from the Oracle server. I am able to connect and read data from the server using 'DirectRunner', but I get the following error with 'DataflowRunner' when using the ojdbc8 driver jar:
(901b8e8f2f8a547a): java.lang.RuntimeException: org.apache.beam.sdk.util.UserCodeException: java.sql.SQLException: Cannot create PoolableConnectionFactory (ORA-00604: error occurred at recursive SQL level 1
ORA-01882: timezone region not found
)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory$3.typedApply(MapTaskExecutorFactory.java:338)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory$3.typedApply(MapTaskExecutorFactory.java:308)
at com.google.cloud.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:63)
at com.google.cloud.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:50)
at com.google.cloud.dataflow.worker.graph.Networks.replaceDirectedNetworkNodes(Networks.java:87)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory.create(MapTaskExecutorFactory.java:154)
at com.google.cloud.dataflow.worker.DataflowWorker.doWork(DataflowWorker.java:308)
at com.google.cloud.dataflow.worker.DataflowWorker.getAndPerformWork(DataflowWorker.java:264)
at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:133)
at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:113)
at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:100)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: org.apache.beam.sdk.util.UserCodeException: java.sql.SQLException: Cannot create PoolableConnectionFactory (ORA-00604: error occurred at recursive SQL level 1
ORA-01882: timezone region not found
)
at org.apache.beam.sdk.util.UserCodeException.wrap(UserCodeException.java:36)
at org.apache.beam.sdk.io.jdbc.JdbcIO$ReadFn$DoFnInvoker.invokeSetup(Unknown Source)
at com.google.cloud.dataflow.worker.DoFnInstanceManagers$ConcurrentQueueInstanceManager.deserializeCopy(DoFnInstanceManagers.java:63)
at com.google.cloud.dataflow.worker.DoFnInstanceManagers$ConcurrentQueueInstanceManager.peek(DoFnInstanceManagers.java:45)
at com.google.cloud.dataflow.worker.UserParDoFnFactory.create(UserParDoFnFactory.java:94)
at com.google.cloud.dataflow.worker.DefaultParDoFnFactory.create(DefaultParDoFnFactory.java:74)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory.createParDoOperation(MapTaskExecutorFactory.java:415)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory$3.typedApply(MapTaskExecutorFactory.java:326)
... 14 more
Caused by: java.sql.SQLException: Cannot create PoolableConnectionFactory (ORA-00604: error occurred at recursive SQL level 1
ORA-01882: timezone region not found
)
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2294)
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:2039)
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1533)
at org.apache.beam.sdk.io.jdbc.JdbcIO$ReadFn.setup(JdbcIO.java:503)
Caused by: java.sql.SQLException: ORA-00604: error occurred at recursive SQL level 1
ORA-01882: timezone region not found
at oracle.jdbc.driver.T4CTTIoer11.processError(T4CTTIoer11.java:494)
at oracle.jdbc.driver.T4CTTIoer11.processError(T4CTTIoer11.java:441)
at oracle.jdbc.driver.T4CTTIoer11.processError(T4CTTIoer11.java:436)
at oracle.jdbc.driver.T4CTTIfun.processError(T4CTTIfun.java:1061)
at oracle.jdbc.driver.T4CTTIoauthenticate.processError(T4CTTIoauthenticate.java:550)
at oracle.jdbc.driver.T4CTTIfun.receive(T4CTTIfun.java:623)
at oracle.jdbc.driver.T4CTTIfun.doRPC(T4CTTIfun.java:252)
at oracle.jdbc.driver.T4CTTIoauthenticate.doOAUTH(T4CTTIoauthenticate.java:499)
at oracle.jdbc.driver.T4CTTIoauthenticate.doOAUTH(T4CTTIoauthenticate.java:1279)
at oracle.jdbc.driver.T4CConnection.logon(T4CConnection.java:663)
at oracle.jdbc.driver.PhysicalConnection.connect(PhysicalConnection.java:688)
at oracle.jdbc.driver.T4CDriverExtension.getConnection(T4CDriverExtension.java:39)
at oracle.jdbc.driver.OracleDriver.connect(OracleDriver.java:691)
at org.apache.commons.dbcp2.DriverConnectionFactory.createConnection(DriverConnectionFactory.java:39)
at org.apache.commons.dbcp2.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:256)
at org.apache.commons.dbcp2.BasicDataSource.validateConnectionFactory(BasicDataSource.java:2304)
at org.apache.commons.dbcp2.BasicDataSource.createPoolableConnectionFactory(BasicDataSource.java:2290)
at org.apache.commons.dbcp2.BasicDataSource.createDataSource(BasicDataSource.java:2039)
at org.apache.commons.dbcp2.BasicDataSource.getConnection(BasicDataSource.java:1533)
at org.apache.beam.sdk.io.jdbc.JdbcIO$ReadFn.setup(JdbcIO.java:503)
at org.apache.beam.sdk.io.jdbc.JdbcIO$ReadFn$DoFnInvoker.invokeSetup(Unknown Source)
at com.google.cloud.dataflow.worker.DoFnInstanceManagers$ConcurrentQueueInstanceManager.deserializeCopy(DoFnInstanceManagers.java:63)
at com.google.cloud.dataflow.worker.DoFnInstanceManagers$ConcurrentQueueInstanceManager.peek(DoFnInstanceManagers.java:45)
at com.google.cloud.dataflow.worker.UserParDoFnFactory.create(UserParDoFnFactory.java:94)
at com.google.cloud.dataflow.worker.DefaultParDoFnFactory.create(DefaultParDoFnFactory.java:74)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory.createParDoOperation(MapTaskExecutorFactory.java:415)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory$3.typedApply(MapTaskExecutorFactory.java:326)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory$3.typedApply(MapTaskExecutorFactory.java:308)
at com.google.cloud.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:63)
at com.google.cloud.dataflow.worker.graph.Networks$TypeSafeNodeFunction.apply(Networks.java:50)
at com.google.cloud.dataflow.worker.graph.Networks.replaceDirectedNetworkNodes(Networks.java:87)
at com.google.cloud.dataflow.worker.MapTaskExecutorFactory.create(MapTaskExecutorFactory.java:154)
at com.google.cloud.dataflow.worker.DataflowWorker.doWork(DataflowWorker.java:308)
at com.google.cloud.dataflow.worker.DataflowWorker.getAndPerformWork(DataflowWorker.java:264)
at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.doWork(DataflowBatchWorkerHarness.java:133)
at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:113)
at com.google.cloud.dataflow.worker.DataflowBatchWorkerHarness$WorkerThread.call(DataflowBatchWorkerHarness.java:100)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Note: I connected and read data from the server successfully using the ojdbc6 driver with both DirectRunner and DataflowRunner, but I want to get it working with the ojdbc8 driver jar.
The following is my DataSourceConfiguration for JdbcIO:
DataSourceConfiguration dataSourceConfiguration = JdbcIO.DataSourceConfiguration
        .create(options.getDriverName(), options.getJdbcUrl())
        .withUsername(options.getUsername())
        .withPassword(options.getPassword())
        .withConnectionProperties("timezone=CST");
Any inputs on making this connection successful using the ojdbc8 driver jar in 'DataflowRunner'?
It looks like the error (ORA-00604: error occurred at recursive SQL level 1, ORA-01882: timezone region not found) is known and quite common with the Oracle JDBC drivers.
It happens when the JDBC driver is not able, for some reason, to send the right timezone ID to the server. After some investigation and searches across different sources (including other Stack Overflow cases such as this one or this other one), I have found different possible solutions, so let me summarize them here:
Make sure that you are using the latest available version of the ojdbc8 driver; maybe something was broken in the specific version you were using. Try changing to a different version and see if that works.
Try setting the default Timezone to your timezone before establishing the connection, as in Solution 2 below.
Add the configuration line in Solution 3 to the file oracle/jdbc/defaultConnectionProperties.properties.
Summarized solutions:
// Solution 2: set the JVM default timezone before establishing the connection
// (requires: import java.util.TimeZone;)
TimeZone timeZone = TimeZone.getTimeZone("yourTimeZone"); // e.g. "America/Chicago" for CST
TimeZone.setDefault(timeZone);

// Solution 3: line to add to oracle/jdbc/defaultConnectionProperties.properties
oracle.jdbc.timezoneAsRegion=false
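In the Dataflow case specifically, keep in mind that TimeZone.setDefault() in the main program only affects the JVM that launches the pipeline, not the remote worker JVMs. A minimal sketch of applying Solution 3 through your existing configuration instead, assuming the Oracle driver also accepts oracle.jdbc.timezoneAsRegion as a connection property:
DataSourceConfiguration dataSourceConfiguration = JdbcIO.DataSourceConfiguration
        .create(options.getDriverName(), options.getJdbcUrl())
        .withUsername(options.getUsername())
        .withPassword(options.getPassword())
        // ask the driver to send a fixed offset instead of a region ID (Solution 3)
        .withConnectionProperties("oracle.jdbc.timezoneAsRegion=false");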
Related
We have an application running on WebLogic. At times (once every 2-3 weeks), all of a sudden I start getting the stack trace below. Even after multiple tries I only get "JZ0R2: No result set for this query", but the data is there in the DB for that row. To resolve it, I simply refresh the WebLogic connection pools and things start working as expected. Can someone help with any tentative reason for this behavior?
DB : Sybase DB version 15.7
Java: 1.7
Error retrieving database meta-data; nested exception is org.springframework.jdbc.support.MetaDataAccessException: Error while extracting DatabaseMetaData; nested exception is java.sql.SQLException: JZ0R2: No result set for this query.
org.springframework.dao.DataAccessResourceFailureException: Error retrieving database meta-data; nested exception is org.springframework.jdbc.support.MetaDataAccessException: Error while extracting DatabaseMetaData; nested exception is java.sql.SQLException: JZ0R2: No result set for this query.
at org.springframework.jdbc.core.metadata.CallMetaDataProviderFactory.createMetaDataProvider(CallMetaDataProviderFactory.java:142)
at org.springframework.jdbc.core.metadata.CallMetaDataContext.initializeMetaData(CallMetaDataContext.java:243)
at org.springframework.jdbc.core.simple.AbstractJdbcCall.compileInternal(AbstractJdbcCall.java:304)
at org.springframework.jdbc.core.simple.AbstractJdbcCall.compile(AbstractJdbcCall.java:289)
at org.springframework.jdbc.core.simple.AbstractJdbcCall.checkCompiled(AbstractJdbcCall.java:349)
at org.springframework.jdbc.core.simple.AbstractJdbcCall.doExecute(AbstractJdbcCall.java:364)
at org.springframework.jdbc.core.simple.SimpleJdbcCall.execute(SimpleJdbcCall.java:197)
Try one of these:
conn.setAutoCommit(true);
or set one of the transaction isolation levels, for instance:
conn.setTransactionIsolation(0); // 0 == Connection.TRANSACTION_NONE
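A minimal sketch of where these calls would go, assuming dataSource is a handle to the WebLogic-managed pool (the class and method names here are hypothetical):
import java.sql.Connection;
import java.sql.SQLException;
import javax.sql.DataSource;

public class ConnectionSettings {
    // dataSource: hypothetical handle to the WebLogic connection pool
    static void configure(DataSource dataSource) throws SQLException {
        try (Connection conn = dataSource.getConnection()) {
            conn.setAutoCommit(true);            // option 1
            // conn.setTransactionIsolation(0);  // option 2 (0 == Connection.TRANSACTION_NONE)
            // ... run the SimpleJdbcCall / metadata lookup on this connection
        }
    }
}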
I am getting this error when attempting to stream an RDS MySQL table into Redshift: Error converting data, invalid type for parameter
The problem field is a DATETIME in MySQL and a timestamp without time zone in Redshift (the same happens with timestamp with time zone). Note: the pipeline was working fine until I populated the date field.
We are using Debezium as the Kafka Connect source for getting data from RDS into Kafka, and the JDBC sink connector with the Redshift JDBC driver for the sink.
Also... I am able to get the data flowing if I make the Redshift field a varchar or a bigint. When I do this, I see that the data is coming across as a unix epoch integer in ms. But we'd really like a timestamp!
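For reference, a sketch of one direction that could produce a proper timestamp (this is an assumption, not something from the original setup: it relies on the sink connector applying Kafka Connect's built-in TimestampConverter single message transform, and starts_on is just the field name from the INSERT below):
transforms=tsconvert
transforms.tsconvert.type=org.apache.kafka.connect.transforms.TimestampConverter$Value
transforms.tsconvert.field=starts_on
transforms.tsconvert.target.type=Timestamp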
Error message in context:
2018-10-18 22:48:32,972 DEBUG || INSERT sql: INSERT INTO "funschema"."test_table"("user_id","subscription_code","source","receipt","starts_on") VALUES(?,?,?,?,?) [io.confluent.connect.jdbc.sink.BufferedRecords]
2018-10-18 22:48:32,987 WARN || Write of 28 records failed, remainingRetries=7 [io.confluent.connect.jdbc.sink.JdbcSinkTask]
java.sql.BatchUpdateException: [Amazon][JDBC](10120) Error converting data, invalid type for parameter: 5.
at com.amazon.jdbc.common.SStatement.createBatchUpdateException(Unknown Source)
at com.amazon.jdbc.common.SStatement.access$100(Unknown Source)
at com.amazon.jdbc.common.SStatement$BatchExecutionContext.createBatchUpdateException(Unknown Source)
at com.amazon.jdbc.common.SStatement$BatchExecutionContext.createResults(Unknown Source)
at com.amazon.jdbc.common.SStatement$BatchExecutionContext.doProcess(Unknown Source)
at com.amazon.jdbc.common.SStatement$BatchExecutionContext.processInt(Unknown Source)
at com.amazon.jdbc.common.SStatement.processBatchResults(Unknown Source)
at com.amazon.jdbc.common.SPreparedStatement.executeBatch(Unknown Source)
at io.confluent.connect.jdbc.sink.BufferedRecords.flush(BufferedRecords.java:138)
at io.confluent.connect.jdbc.sink.JdbcDbWriter.write(JdbcDbWriter.java:66)
at io.confluent.connect.jdbc.sink.JdbcSinkTask.put(JdbcSinkTask.java:75)
Thanks,
Tom
I am debugging Big Data code in my company's production environment. Hive returns the following error:
Exception: org.apache.hadoop.hive.ql.lockmgr.LockException: No record of lock could be found, may have timed out
Killing DAG...
Execution has failed.
Exception in thread "main" java.sql.SQLException: Error while processing statement: FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.tez.TezTask.
at org.apache.hive.jdbc.HiveStatement.execute(HiveStatement.java:282)
at org.apache.hive.jdbc.HiveStatement.executeUpdate(HiveStatement.java:392)
at HiveExec.main(HiveExec.java:159)
After investigation, I found that this error could be caused by BoneCP being set in the connectionPoolingType property, but the cluster support team told me that they fixed this bug by upgrading BoneCP.
My question is: can we INSERT INTO an external table in Hive? I have doubts about the insertion script.
Yes, you can insert into an external table.
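A minimal sketch through the Hive JDBC driver, matching the HiveStatement path in your stack trace (the HiveServer2 URL, credentials, and table names are hypothetical):
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class HiveInsertExample {
    public static void main(String[] args) throws Exception {
        // hypothetical HiveServer2 URL and credentials
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://hiveserver:10000/default", "user", "password");
             Statement stmt = conn.createStatement()) {
            // INSERT INTO works the same for external and managed tables
            stmt.executeUpdate(
                "INSERT INTO TABLE ext_sales SELECT id, amount FROM staging_sales");
        }
    }
}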
We have a WSO2 IS 5.0.0.SP1 non-HA deployment with PostgreSQL, and we occasionally see the following exception in the log (I believe the users are then unable to log in; they need to close the application (web/mobile) and try again):
wso2carbon.log
Error while storing session data {org.wso2.carbon.identity.application.authentication.framework.store.SessionDataStore}
org.postgresql.util.PSQLException: ERROR: duplicate key value violates unique constraint "idn_auth_session_store_pkey"
Detail: Key (session_id, session_type)=(315f9b01-6008-4794-b7d8-a76c7cc83102, SAMLSSOParticipantCache) already exists.
at org.postgresql.core.v3.QueryExecutorImpl.receiveErrorResponse(QueryExecutorImpl.java:2198)
at org.postgresql.core.v3.QueryExecutorImpl.processResults(QueryExecutorImpl.java:1927)
at org.postgresql.core.v3.QueryExecutorImpl.execute(QueryExecutorImpl.java:255)
at org.postgresql.jdbc2.AbstractJdbc2Statement.execute(AbstractJdbc2Statement.java:562)
at org.postgresql.jdbc2.AbstractJdbc2Statement.executeWithFlags(AbstractJdbc2Statement.java:420)
at org.postgresql.jdbc2.AbstractJdbc2Statement.executeUpdate(AbstractJdbc2Statement.java:366)
at org.wso2.carbon.identity.application.authentication.framework.store.SessionDataStore.persistSessionData(SessionDataStore.java:363)
at org.wso2.carbon.identity.application.authentication.framework.store.SessionDataPersistTask.run(SessionDataPersistTask.java:54)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
Session persistence configuration in the identity.xml:
<SessionDataPersist>
    <Enable>true</Enable>
    <RememberMePeriod>780</RememberMePeriod>
    <CleanUp>
        <Enable>true</Enable>
        <Period>60</Period>
        <TimeOut>781</TimeOut>
    </CleanUp>
    <Temporary>true</Temporary>
</SessionDataPersist>
Any idea / hint would be appreciated. Could it be caused by using the "back" button, when the session has already been persisted in the cache?
Thank you all in advance
Gabriel
I'm trying to get some data from a remote Oracle database,
so I configured a new connection to the database. When I press Test, it says that the connection was established successfully, but when I try a simple select query, Report Designer gives me an error:
org.pentaho.reporting.engine.classic.core.ReportDataFactoryException: Failed at query: select * from fact_table;
at org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.SimpleSQLReportDataFactory.queryData(SimpleSQLReportDataFactory.java:258)
at org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.SQLReportDataFactory.queryData(SQLReportDataFactory.java:171)
at org.pentaho.reporting.ui.datasources.jdbc.ui.JdbcPreviewWorker.run(JdbcPreviewWorker.java:103)
at java.lang.Thread.run(Unknown Source)
ParentException:
java.sql.SQLException: ORA-00911: invalid character
at oracle.jdbc.driver.DatabaseError.throwSqlException(DatabaseError.java:125)
at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:305)
at oracle.jdbc.driver.T4CTTIoer.processError(T4CTTIoer.java:272)
at oracle.jdbc.driver.T4C8Oall.receive(T4C8Oall.java:623)
at oracle.jdbc.driver.T4CStatement.doOall8(T4CStatement.java:112)
at oracle.jdbc.driver.T4CStatement.execute_for_describe(T4CStatement.java:351)
at oracle.jdbc.driver.OracleStatement.execute_maybe_describe(OracleStatement.java:896)
at oracle.jdbc.driver.T4CStatement.execute_maybe_describe(T4CStatement.java:383)
at oracle.jdbc.driver.OracleStatement.doExecuteWithTimeout(OracleStatement.java:986)
at oracle.jdbc.driver.OracleStatement.doScrollExecuteCommon(OracleStatement.java:3763)
at oracle.jdbc.driver.OracleStatement.doScrollStmtExecuteQuery(OracleStatement.java:3887)
at oracle.jdbc.driver.OracleStatement.executeQuery(OracleStatement.java:1131)
at org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.SimpleSQLReportDataFactory.parametrizeAndQuery(SimpleSQLReportDataFactory.java:422)
at org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.SimpleSQLReportDataFactory.queryData(SimpleSQLReportDataFactory.java:254)
at org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.SQLReportDataFactory.queryData(SQLReportDataFactory.java:171)
at org.pentaho.reporting.ui.datasources.jdbc.ui.JdbcPreviewWorker.run(JdbcPreviewWorker.java:103)
at java.lang.Thread.run(Unknown Source)
So how can I get this done?
select * from fact_table seems to be a valid query, but the Oracle JDBC driver does not accept a trailing semicolon in the statement text; that is what ORA-00911: invalid character points to here. Try removing the semicolon at the end.
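A minimal sketch of the corrected call, assuming conn is the already-configured Oracle connection:
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class QueryWithoutSemicolon {
    static void runQuery(Connection conn) throws SQLException {
        // no trailing ';' -- the Oracle driver rejects it with ORA-00911
        try (Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("select * from fact_table")) {
            while (rs.next()) {
                System.out.println(rs.getObject(1));
            }
        }
    }
}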