Posted to issues@spark.apache.org by "melin (Jira)" <ji...@apache.org> on 2023/03/01 05:06:00 UTC
[jira] [Updated] (SPARK-42627) Spark: Getting SQLException: Unsupported type -102 reading from Oracle
[ https://issues.apache.org/jira/browse/SPARK-42627?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
melin updated SPARK-42627:
--------------------------
Description:
{code:java}
Exception in thread "main" org.apache.spark.SparkSQLException: Unrecognized SQL type -102
at org.apache.spark.sql.errors.QueryExecutionErrors$.unrecognizedSqlTypeError(QueryExecutionErrors.scala:832)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.getCatalystType(JdbcUtils.scala:225)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.$anonfun$getSchema$1(JdbcUtils.scala:308)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcUtils$.getSchema(JdbcUtils.scala:308)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.getQueryOutputSchema(JDBCRDD.scala:70)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:58)
at org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation$.getSchema(JDBCRelation.scala:242)
at org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:37)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:350)
at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:228)
at org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:210)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:210)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:171)
{code}
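For reference, JDBC type -102 is Oracle's OracleTypes.TIMESTAMPLTZ, i.e. the TIMESTAMP WITH LOCAL TIME ZONE type of the ORDER_DATE column below; the Oracle dialect in Spark 3.3.2 has no mapping for it, so JdbcUtils.getCatalystType throws during schema resolution. A minimal read that should reproduce the failure (the URL, credentials, and app name here are placeholders, not taken from the report):
{code:scala}
import org.apache.spark.sql.SparkSession

// Placeholder connection details; only the ORDERS table matters for the repro.
val spark = SparkSession.builder().appName("oracle-ltz-repro").getOrCreate()

val df = spark.read
  .format("jdbc")
  .option("url", "jdbc:oracle:thin:@//localhost:1521/ORCLPDB1")
  .option("dbtable", "ORDERS")
  .option("user", "test")
  .option("password", "test")
  .load() // fails here: JdbcUtils.getCatalystType sees JDBC type -102
{code}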
Oracle DDL for the table being read:
{code:sql}
CREATE TABLE "ORDERS" (
  "ORDER_ID" NUMBER(9,0) NOT NULL ENABLE,
  "ORDER_DATE" TIMESTAMP(3) WITH LOCAL TIME ZONE NOT NULL ENABLE,
  "CUSTOMER_NAME" VARCHAR2(255) NOT NULL ENABLE,
  "PRICE" NUMBER(10,5) NOT NULL ENABLE,
  "PRODUCT_ID" NUMBER(9,0) NOT NULL ENABLE,
  "ORDER_STATUS" NUMBER(1,0) NOT NULL ENABLE,
  PRIMARY KEY ("ORDER_ID")
    USING INDEX PCTFREE 10 INITRANS 2 MAXTRANS 255 COMPUTE STATISTICS
    STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
    PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
    TABLESPACE "LOGMINER_TBS" ENABLE,
  SUPPLEMENTAL LOG DATA (ALL) COLUMNS
) SEGMENT CREATION IMMEDIATE
  PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255 NOCOMPRESS LOGGING
  STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645
  PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)
  TABLESPACE "LOGMINER_TBS"
{code}
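A possible user-side workaround, sketched here rather than taken from the report: register a custom JdbcDialect that maps JDBC type -102 to Catalyst TimestampType before reading. The dialect name is made up; JdbcDialect and JdbcDialects are Spark's public developer API.
{code:scala}
import org.apache.spark.sql.jdbc.{JdbcDialect, JdbcDialects}
import org.apache.spark.sql.types.{DataType, MetadataBuilder, TimestampType}

// Hypothetical dialect: map Oracle's TIMESTAMPLTZ (JDBC type -102) to
// Catalyst TimestampType so schema resolution can succeed.
object OracleLtzDialect extends JdbcDialect {
  private val TIMESTAMPLTZ = -102 // oracle.jdbc.OracleTypes.TIMESTAMPLTZ

  override def canHandle(url: String): Boolean = url.startsWith("jdbc:oracle")

  override def getCatalystType(
      sqlType: Int,
      typeName: String,
      size: Int,
      md: MetadataBuilder): Option[DataType] =
    if (sqlType == TIMESTAMPLTZ) Some(TimestampType) else None
}

// Register before the read; user-registered dialects are consulted ahead of
// the built-in Oracle dialect, whose other mappings still apply when this
// dialect returns None.
JdbcDialects.registerDialect(OracleLtzDialect)
{code}
If registering a dialect is not feasible, pushing a cast into the read (e.g. the query option with SELECT CAST(ORDER_DATE AS TIMESTAMP) AS ORDER_DATE, ... FROM ORDERS) should also avoid the unmapped type.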
> Spark: Getting SQLException: Unsupported type -102 reading from Oracle
> ----------------------------------------------------------------------
>
> Key: SPARK-42627
> URL: https://issues.apache.org/jira/browse/SPARK-42627
> Project: Spark
> Issue Type: Bug
> Components: SQL
> Affects Versions: 3.3.2
> Reporter: melin
> Priority: Major
>