I am downloading data from a third-party API application.
My main concern is that the volume of data grows with day-to-day activity in this application.
For the past few days I have been facing an issue: downloading the data takes too long (currently more than 5 hours, and this will keep increasing as the data grows).
Most of the time I get a "timeout expired" error, the data does not download completely, and the process terminates.
For this requirement, I developed the code below and call the resulting executable from a scheduler.
The third-party API application generates its output as XML.
From the XML, we load the data into SQL Server using SqlBulkCopy.
The SqlBulkCopy step is taking too much time to load the data.
Is there any other way to download the data in a shorter period of time?
Each loop iteration can fetch only 500 records, due to the XML page-size limit (per client policy).
We are struggling to download the data, and sometimes, while the download is running, we cannot perform DML operations on the destination SQL table either.
Please find the complete source code below.
// --- Setup: state for the paged API download + bulk-copy run -----------------
// NOTE(review): this is the top of a larger routine; names used below
// (cmd, userName, password) are declared outside this excerpt — confirm
// against the full file.
double IntPNo = 0;     // loop upper bound: total pages to fetch
                       // NOTE(review): double for a page count is odd; int would do
string ClientUri = ""; // per-page request URL, rebuilt on every loop pass
// NOTE(review): verbatim string (@) plus "\\" yields a literal double
// backslash after C: — confirm the intended path is C:\Windows\Temp\AllData.xml.
// The variable is also unused in this excerpt.
string xml_location = @"C:\\Windows\Temp\AllData.xml";
string accessToken = ""; // bearer token; empty here, so Basic auth is used in the loop
string method = "GET";   // HTTP verb for every API call
string response = "";    // raw XML payload of the current page
DataSet theDataSet = new DataSet(); // parsed XML tables
// NOTE(review): ReadXml is called on this same DataSet every iteration and only
// Tables[2]/[3] are cleared — if other tables accumulate rows across 3600 pages,
// memory grows for the whole run; verify.
var ConnStr = System.Configuration.ConfigurationManager.ConnectionStrings["dbCon"].ConnectionString.ToString();
DBConnection dbCon = new DBConnection(); // project-local helper; unused in this excerpt
DataSet ds = new DataSet();              // unused in this excerpt
SqlConnection con = new SqlConnection(ConnStr); // one shared connection for the whole run
SqlBulkCopy BLk1 = new SqlBulkCopy(con);        // bulk loader for tbl_emp_data
SqlBulkCopy BLk2 = new SqlBulkCopy(con);        // bulk loader for tbl_emp_address_data
// NOTE(review): no BatchSize/BulkCopyTimeout set on either SqlBulkCopy —
// the default 30s timeout is a likely source of the "timeout expired" failures.
int cnt = 0;       // NOTE(review): assigned from an undeclared 'pageNumber' in the loop — presumably meant pNo
StreamReader str;  // response-stream reader, reused per page
//To get Total pages from xml
int TotPages = 3600;     //Total Pages 3600 for reference
int TotRecords = 900000; //9 Lakhs Records for reference
IntPNo = TotPages; // one HTTP round-trip per page (500 records each, per client policy)
// Clear the destination table via stored procedure before the bulk load.
// NOTE(review): 'cmd' is not declared in this excerpt — it must come from the
// surrounding code; confirm it is a SqlCommand.
//**** Truncate table before bulk copying ****// con.Open(); cmd.Connection = con; cmd.CommandText = "Spd_Emp_Data"; cmd.CommandType = CommandType.StoredProcedure; cmd.Parameters.AddWithValue("@Spd_TYPE", "Remove_Data"); cmd.ExecuteNonQuery();
con.Close();
//**** Truncate table before bulk copying ****//
//*****// Total Records : 900000 ********
// IntPNo = Total Records / 500
(Continued....)
(Continued....)
// Fetch the API one page at a time (500 records/page) and bulk-load each page
// into two destination tables.
// NOTE(review): this excerpt is truncated — the catch/finally for the try
// below and the end of the for loop are outside the visible source. Also note
// con.Open() is called every iteration but no con.Close() is visible inside
// the loop; the second iteration will throw if the connection is still open —
// confirm against the full file.
for (int pNo = 1; pNo <= IntPNo; pNo++)
{
// NOTE(review): 'pageNumber' is not declared anywhere in this excerpt — this
// looks like it should be 'pNo'; as posted it will not compile.
cnt = pageNumber;
// Column mappings are rebuilt from the current page's columns on every pass.
BLk1.ColumnMappings.Clear();
BLk2.ColumnMappings.Clear();
// Build the per-page request. NOTE(review): a new HttpWebRequest per page is
// required, but consider a shared HttpClient with a longer Timeout; the
// default request timeout is another candidate for the reported timeouts.
ClientUri = "https://api1.sales.aus/api/empdata/entry?p_size=500&p_number=" + pNo + "&get_fields=empno,empname,empsal,dateofjoin,p1,p2,Remarks"; HttpWebRequest request = (HttpWebRequest)WebRequest.Create(ClientUri); string credentials = "";
// Prefer bearer auth when a token is present; otherwise fall back to Basic.
if (accessToken != "")
credentials = "Bearer " + accessToken;
else
// NOTE(review): userName/password are declared outside this excerpt.
credentials = "Basic " + Convert.ToBase64String(Encoding.UTF8.GetBytes(userName + ":" + password)); request.Headers.Set(HttpRequestHeader.Authorization, credentials); request.AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate;
request.Headers.Set("X-Application-Key", "vndjusi98-dffs909-fdado9-adaldos");
request.Method = method;
con.Open();
try
{
// Read the whole XML page into memory, parse it, then bulk-copy two of the
// resulting tables. NOTE(review): Tables[2]/Tables[3] are positional —
// confirm the API's XML always produces the same table order.
using (str = new StreamReader( request.GetResponse().GetResponseStream()))
{
response = str.ReadToEnd();
StringReader theReader = new StringReader(response); theDataSet.ReadXml(theReader);
BLk1.DestinationTableName = "tbl_emp_data";
// NOTE(review): 'Clear' here is a method from the surrounding code, not
// DataTable.Clear() — verify it empties the table between pages.
foreach (DataColumn col1 in theDataSet.Tables[2].Columns) BLk1.ColumnMappings.Add(col1.ColumnName, col1.ColumnName); BLk1.WriteToServer(theDataSet.Tables[2]); BLk2.DestinationTableName = "tbl_emp_address_data"; foreach (DataColumn col2 in theDataSet.Tables[3].Columns) BLk2.ColumnMappings.Add(col2.ColumnName, col2.ColumnName); BLk2.WriteToServer(theDataSet.Tables[3]); Clear(theDataSet.Tables[2]);
Clear(theDataSet.Tables[3]);
}
}
}