Login failed issue when reading Azure Synapse Analytics from Databricks
Hello Team,
I am trying to read data from Azure Synapse Analytics from Databricks. Below is the query used to read the table.
%python
tableName = "dbo.Location"

# Read from Azure Synapse table via spark.read.load
df = spark.read \
    .format("com.databricks.spark.sqldw") \
    .option("url", "jdbc:sqlserver://xyz.sql.azuresynapse.net:1433;database=xyz;user=admin_synapse@tbue-synw-bis-cwe1;password=2022_;encrypt=true;trustServerCertificate=true;hostNameInCertificate=*.sql.azuresynapse.net;loginTimeout=30;") \
    .option("tempDir", "abfss://xyz@xyz.dfs.core.windows.net/temp") \
    .option("forwardSparkAzureStorageCredentials", "true") \
    .option("dbTable", tableName) \
    .load()
I am getting this error:
com.databricks.spark.sqldw.SqlDWSideException: Azure Synapse Analytics failed to execute the JDBC query produced by the connector.
---------------------------------------------------------------------------
Py4JJavaError Traceback (most recent call last)
<command-911231710473670> in <cell line: 9>()
7 "[REDACTED]")
8 ### Read from Azure Synapse table via spark.read.load
----> 9 df = spark.read \
10 .format("com.databricks.spark.sqldw") \
11 .option("url", "jdbc:sqlserver://tbue-synw-bis-cwe1.sql.azuresynapse.net:1433;database=tbuesynwbiscwe1;user=admin_synapse@tbue-synw-bis-cwe1;password=2022DLH_;encrypt=true;trustServerCertificate=true;hostNameInCertificate=*.sql.azuresynapse.net;loginTimeout=30;")\
/databricks/spark/python/pyspark/sql/readwriter.py in load(self, path, format, schema, **options)
182 return self._df(self._jreader.load(self._spark._sc._jvm.PythonUtils.toSeq(path)))
183 else:
--> 184 return self._df(self._jreader.load())
185
186 def json(
/databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/java_gateway.py in __call__(self, *args)
1319
1320 answer = self.gateway_client.send_command(command)
-> 1321 return_value = get_return_value(
1322 answer, self.gateway_client, self.target_id, self.name)
1323
/databricks/spark/python/pyspark/sql/utils.py in deco(*a, **kw)
194 def deco(*a: Any, **kw: Any) -> Any:
195 try:
--> 196 return f(*a, **kw)
197 except Py4JJavaError as e:
198 converted = convert_exception(e.java_exception)
/databricks/spark/python/lib/py4j-0.10.9.5-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
324 value = OUTPUT_CONVERTER[type](answer[2:], gateway_client)
325 if answer[1] == REFERENCE_TYPE:
--> 326 raise Py4JJavaError(
327 "An error occurred while calling {0}{1}{2}.\n".
328 format(target_id, ".", name), value)
Py4JJavaError: An error occurred while calling o3673.load.
: com.databricks.spark.sqldw.SqlDWSideException: Azure Synapse Analytics failed to execute the JDBC query produced by the connector.
Underlying SQLException(s):
- com.microsoft.sqlserver.jdbc.SQLServerException: Invalid object name 'dbo.Location'. [ErrorCode = 208] [SQLState = S0002]
at com.databricks.spark.sqldw.Utils$.wrapExceptions(Utils.scala:723)
at com.databricks.spark.sqldw.SqlDWRelation.schema$lzycompute(SqlDWRelation.scala:68)
at com.databricks.spark.sqldw.SqlDWRelation.schema(SqlDWRelation.scala:67)
at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:496)
at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:368)
at org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:324)
at scala.Option.getOrElse(Option.scala:189)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:324)
at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:223)
at sun.reflect.GeneratedMethodAccessor439.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:380)
at py4j.Gateway.invoke(Gateway.java:306)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.ClientServerConnection.waitForCommands(ClientServerConnection.java:195)
at py4j.ClientServerConnection.run(ClientServerConnection.java:115)
at java.lang.Thread.run(Thread.java:748)
Caused by: java.sql.SQLException: Exception thrown in awaitResult:
at com.databricks.spark.sqldw.JDBCWrapper.executeInterruptibly(SqlDWJDBCWrapper.scala:137)
at com.databricks.spark.sqldw.JDBCWrapper.resolveTable(SqlDWJDBCWrapper.scala:180)
at com.databricks.spark.sqldw.SqlDWRelation.$anonfun$schema$3(SqlDWRelation.scala:70)
at com.databricks.spark.sqldw.JDBCWrapper.withConnection(SqlDWJDBCWrapper.scala:340)
at com.databricks.spark.sqldw.SqlDWRelation.$anonfun$schema$2(SqlDWRelation.scala:69)
at scala.Option.getOrElse(Option.scala:189)
at com.databricks.spark.sqldw.SqlDWRelation.$anonfun$schema$1(SqlDWRelation.scala:69)
at com.databricks.spark.sqldw.Utils$.wrapExceptions(Utils.scala:692)
... 19 more
Caused by: com.microsoft.sqlserver.jdbc.SQLServerException: Invalid object name 'dbo.Location'.
at com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:262)
at com.microsoft.sqlserver.jdbc.SQLServerStatement.getNextResult(SQLServerStatement.java:1632)
at com.microsoft.sqlserver.jdbc.SQLServerStatement.doExecuteStatement(SQLServerStatement.java:872)
at com.microsoft.sqlserver.jdbc.SQLServerStatement$StmtExecCmd.doExecute(SQLServerStatement.java:767)
at com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7418)
at com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:3272)
at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeCommand(SQLServerStatement.java:247)
at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeStatement(SQLServerStatement.java:222)
at com.microsoft.sqlserver.jdbc.SQLServerStatement.executeQueryInternal(SQLServerStatement.java:699)
at com.microsoft.sqlserver.jdbc.SQLServerPreparedStatement.buildExecuteMetaData(SQLServerPreparedStatement.java:1090)
at com.microsoft.sqlserver.jdbc.SQLServerPreparedStatement.getMetaData(SQLServerPreparedStatement.java:1066)
at com.databricks.spark.sqldw.JDBCWrapper.$anonfun$resolveTable$1(SqlDWJDBCWrapper.scala:180)
at com.databricks.spark.sqldw.JDBCWrapper.$anonfun$executeInterruptibly$3(SqlDWJDBCWrapper.scala:129)
at scala.concurrent.Future$.$anonfun$apply$1(Future.scala:659)
at scala.util.Success.$anonfun$map$1(Try.scala:255)
at scala.util.Success.map(Try.scala:213)
at scala.concurrent.Future.$anonfun$map$1(Future.scala:292)
at scala.concurrent.impl.Promise.liftedTree1$1(Promise.scala:33)
at scala.concurrent.impl.Promise.$anonfun$transform$1(Promise.scala:33)
at scala.concurrent.impl.CallbackRunnable.run(Promise.scala:64)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
But the table already exists in Synapse as an external table.
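For reference, the same connector can also run an ad-hoc query instead of loading a whole table, which is how I have been checking whether the object is visible to the login from the JDBC URL. This is only a minimal sketch, assuming a dedicated SQL pool and the same connection values as in the read above; jdbc_url and temp_dir below are placeholders for the URL and abfss tempDir used earlier.

%python
# Sketch: probe the Synapse catalog over the same connection the connector uses.
# jdbc_url and temp_dir are placeholders for the values shown in the read above.
jdbc_url = "<same JDBC URL as above>"
temp_dir = "<same abfss tempDir as above>"

# Ask the pool whether dbo.Location is registered as an external table.
probe_query = """
SELECT s.name AS schema_name, t.name AS table_name
FROM sys.external_tables AS t
JOIN sys.schemas AS s ON s.schema_id = t.schema_id
WHERE t.name = 'Location'
"""

probe_df = (spark.read
    .format("com.databricks.spark.sqldw")
    .option("url", jdbc_url)
    .option("tempDir", temp_dir)
    .option("forwardSparkAzureStorageCredentials", "true")
    .option("query", probe_query)
    .load())

display(probe_df)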
Please advise on a solution.
Regards
Rohit