# Connect Spark to SQL Server / a Fabric SQL endpoint over JDBC, authenticating
# with an Azure AD access token instead of a username/password.
# NOTE(review): fill in the placeholders before running.
server_name = "<SERVER_NAME>"       # e.g. "myserver.database.windows.net"
database_name = "<DATABASE_NAME>"

jdbc_url = f"jdbc:sqlserver://{server_name}:1433;database={database_name};loginTimeout=30;"

# Acquire a Power BI-scoped AAD token. Tokens expire — mint a fresh one
# immediately before each connect rather than caching it long-lived, since a
# stale/expired token produces intermittent "Login failed for user ''" errors.
access_token = notebookutils.credentials.getToken("pbi")

# The MS SQL JDBC driver picks the token up from the "accessToken" property.
connection_properties = {
    "accessToken": access_token,
}

# Read example; a write is symmetric:
#   df.write.jdbc(jdbc_url, "<MY_TABLE>", mode="append", properties=connection_properties)
df = spark.read.jdbc(jdbc_url, "<MY_TABLE>", properties=connection_properties)
The above code intermittently produces the error below. Note that this happens with both reads and writes.
An error occurred while calling o6642.jdbc.
: com.microsoft.sqlserver.jdbc.SQLServerException: Login failed for user ''.Reason: Validation of user's permissions failed.
ClientConnectionId
com.microsoft.sqlserver.jdbc.SQLServerException.makeFromDatabaseError(SQLServerException.java:259)
com.microsoft.sqlserver.jdbc.TDSTokenHandler.onEOF(tdsparser.java:304)
com.microsoft.sqlserver.jdbc.TDSParser.parse(tdsparser.java:137)
com.microsoft.sqlserver.jdbc.TDSParser.parse(tdsparser.java:42)
com.microsoft.sqlserver.jdbc.SQLServerConnection.sendLogon(SQLServerConnection.java:6612)
com.microsoft.sqlserver.jdbc.SQLServerConnection.logon(SQLServerConnection.java:5154)
com.microsoft.sqlserver.jdbc.SQLServerConnection.access$300(SQLServerConnection.java:94)
com.microsoft.sqlserver.jdbc.SQLServerConnection$LogonCommand.doExecute(SQLServerConnection.java:5087)
com.microsoft.sqlserver.jdbc.TDSCommand.execute(IOBuffer.java:7675)
com.microsoft.sqlserver.jdbc.SQLServerConnection.executeCommand(SQLServerConnection.java:4137)
com.microsoft.sqlserver.jdbc.SQLServerConnection.connectHelper(SQLServerConnection.java:3583)
com.microsoft.sqlserver.jdbc.SQLServerConnection.login(SQLServerConnection.java:3172)
com.microsoft.sqlserver.jdbc.SQLServerConnection.connectInternal(SQLServerConnection.java:3014)
com.microsoft.sqlserver.jdbc.SQLServerConnection.connect(SQLServerConnection.java:1836)
com.microsoft.sqlserver.jdbc.SQLServerDriver.connect(SQLServerDriver.java:1246)
org.apache.spark.sql.execution.datasources.jdbc.connection.BasicConnectionProvider.getConnection(BasicConnectionProvider.scala:49)
org.apache.spark.sql.execution.datasources.jdbc.connection.ConnectionProviderBase.create(ConnectionProvider.scala:102)
org.apache.spark.sql.jdbc.JdbcDialect.$anonfun$createConnectionFactory$1(JdbcDialects.scala:161)
org.apache.spark.sql.jdbc.JdbcDialect.$anonfun$createConnectionFactory$1$adapted(JdbcDialects.scala:157)
org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.getQueryOutputSchema(JDBCRDD.scala:63)
org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:58)
org.apache.spark.sql.execution.datasources.jdbc.JDBCRelation$.getSchema(JDBCRelation.scala:241)
org.apache.spark.sql.execution.datasources.jdbc.JdbcRelationProvider.createRelation(JdbcRelationProvider.scala:37)
org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:346)
org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:236)
org.apache.spark.sql.DataFrameReader.$anonfun$load$2(DataFrameReader.scala:219)
scala.Option.getOrElse(Option.scala:189)
org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:219)
org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:174)
org.apache.spark.sql.DataFrameReader.jdbc(DataFrameReader.scala:261)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
java.base/java.lang.reflect.Method.invoke(Method.java:566)
py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:374)
py4j.Gateway.invoke(Gateway.java:282)
py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
py4j.commands.CallCommand.execute(CallCommand.java:79)
py4j.GatewayConnection.run(GatewayConnection.java:238)
java.base/java.lang.Thread.run(Thread.java:829)