Hi Aditya Singh,
Thanks for reaching out to Microsoft Q&A.
The code below should work with a few changes to suit your scenario (replace the placeholder values with your own). If you run into any issues, please share the error message and I can help further.
// Import necessary libraries
import org.apache.spark.sql.{SparkSession, DataFrame}
// Define function to read data from SQL Server
def readFromSqlServer(spark: SparkSession, jdbcUrl: String, table: String): DataFrame = {
  spark.read.format("jdbc")
    .option("url", jdbcUrl)
    .option("dbtable", table)
    .option("user", "<your_username>")
    .option("password", "<your_password>")
    .load()
}
// Define JDBC connection properties
val jdbcUrl = "jdbc:sqlserver://<server>:<port>;databaseName=<database_name>"
// Define table to read from SQL Server
val tableName = "<table_name>"
// Create SparkSession
val spark = SparkSession.builder()
  .appName("Read from SQL Server and Insert into Synapse Table")
  .getOrCreate()
// Read data from SQL Server
val dataFromSqlServer = readFromSqlServer(spark, jdbcUrl, tableName)
// Perform transformations if necessary
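// For example, you might filter rows and keep only the columns you need before loading into Synapse.
// (Illustrative only; <status_column>, <column_1>, <column_2> are placeholders for your own schema.)
// import org.apache.spark.sql.functions.col
// val transformedData = dataFromSqlServer
//   .filter(col("<status_column>").isNotNull)
//   .select("<column_1>", "<column_2>")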
// Define Synapse connection properties
val synapseUrl = "jdbc:sqlserver://<synapse_server>.sql.azuresynapse.net:1433;database=<synapse_database>"
val synapseUsername = "<synapse_username>"
val synapsePassword = "<synapse_password>"
// Write data to the Synapse table
dataFromSqlServer.write
  .format("com.databricks.spark.sqldw")
  .option("url", synapseUrl)
  .option("dbtable", "<synapse_table_name>")
  .option("user", synapseUsername)
  .option("password", synapsePassword)
  // The Synapse (sqldw) connector stages data through Azure storage, so a tempDir is required
  .option("tempDir", "abfss://<container>@<storage_account>.dfs.core.windows.net/<temp_folder>")
  // Forward the storage credentials configured on the cluster to the connector
  .option("forwardSparkAzureStorageCredentials", "true")
  .mode("append")
  .save()
// Stop SparkSession
spark.stop()
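If the source table is large, you can optionally parallelize the JDBC read by partitioning on a numeric column. Here is a minimal sketch, assuming your table has a suitable numeric column; the column name and bounds below are placeholders you would need to adjust:
// Optional: parallel JDBC read over a numeric column (placeholders to replace)
val partitionedRead = spark.read.format("jdbc")
  .option("url", jdbcUrl)
  .option("dbtable", tableName)
  .option("user", "<your_username>")
  .option("password", "<your_password>")
  .option("partitionColumn", "<numeric_column>") // e.g. an integer key column
  .option("lowerBound", "1")                     // minimum value of the partition column
  .option("upperBound", "1000000")               // maximum value of the partition column
  .option("numPartitions", "8")                  // number of parallel connections
  .load()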
Please 'Upvote' (Thumbs-up) and 'Accept as answer' if the reply was helpful. This will benefit other community members who face the same issue.