This is my ASP.NET code-behind:
public string ReadJSON(string jsonPath)
{
    FileStream fs = new FileStream(jsonPath, FileMode.Open, FileAccess.Read);
    StreamReader sr = new StreamReader(fs);
    string WillReturn = "";
    try
    {
        WillReturn = sr.ReadToEnd();
        return WillReturn;
    }
    catch (Exception ex)
    {
        WillReturn = null;
        return WillReturn;
    }
    finally { sr.Close(); fs.Dispose(); }
}
But my data is 128 MB, and I don't get an error; it simply doesn't read. When I debug, on the line WillReturn = sr.ReadToEnd(); the debugger shows "could not evaluate expression" for WillReturn. How can I read this file?
This reads a 127 MB text file into rows in a bit over 2 minutes 30 seconds. Try this sample code:
string strFileName = (string)ViewState["Physical path"];
StreamReader sr = new StreamReader(strFileName);
string line;
string[] result;
int icount = 0;
DataRow dr;
do {
    line = sr.ReadLine();
    if (line != null) {
        result = line.Split('\t');
        icount += 1;
        dr = ds1.Tables[0].NewRow();
        dr.BeginEdit();
        dr["Item1"] = result[0];
        ds1.Tables[0].Rows.Add(dr);
    }
} while (line != null);
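If the goal is just to process a very large text file without holding the whole thing in one string, a minimal sketch along these lines may also help (File.ReadLines, available from .NET 4, enumerates lazily; the path and the per-line work are placeholders):

// Sketch: stream a large text file line by line instead of calling ReadToEnd().
// Requires: using System.IO;
public int CountLines(string path)
{
    int count = 0;
    foreach (string line in File.ReadLines(path))   // only one line is held in memory at a time
    {
        // process each line here (parse, split, load into a table, ...)
        count++;
    }
    return count;
}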
I'm using a FileStream object to push a report in Excel format from our DB, and it works fine for reports with a reasonable number of rows. However, one report has over 15K rows and the site times out after 20 minutes. I know I could increase the timeout, but even 20 minutes is unacceptable right now. Is there any way to speed things up? Here is my current code:
string path = Server.MapPath(@"~\");
string sourceName = Path.Combine(path, "Demo", "Report_Template", templateName);
string newFileName = Path.Combine(Global.demo_data_directory, @"Reports", @"Demo Download Activity Report.xlsx"); // Path.Combine(path, "Demo_Data", "Reports", Filename + ".xlsx");
string copyresults = Utilities.CopyFile(sourceName, newFileName);
if (copyresults != "Copied file")
{
    DemoDAL.ErrorLog("GenerateDownloadActivityReportFile.Page_Load: Copy Error - " + copyresults);
}
else
{
    document = SpreadsheetDocument.Open(newFileName, true);
    wbPart = document.WorkbookPart;
    DataTable datatbl = (DataTable)Session["gvPOC"];
    CreateReport(datatbl);
    // ADDED THIS *********************************************************************************************************
    //wbPart.Workbook.Save();
    using (FileStream fsSource = new FileStream(newFileName, FileMode.Open, FileAccess.Read, FileShare.Read, 10240))
    {
        // Read the source file into a byte array.
        byte[] bytes = new byte[fsSource.Length];
        int numBytesToRead = (int)fsSource.Length;
        int numBytesRead = 0;
        while (numBytesToRead > 0)
        {
            // Read may return anything from 0 to numBytesToRead.
            int n = fsSource.Read(bytes, numBytesRead, numBytesToRead);
            // Break when the end of the file is reached.
            if (n == 0)
                break;
            numBytesRead += n;
            numBytesToRead -= n;
        }
        numBytesToRead = bytes.Length;
        StreamFileToBrowser(newFileName, bytes);
    }
    File.Delete(newFileName);
}
public void StreamFileToBrowser(string sfilename, byte[] fileBytes)
{
    try
    {
        Response.Clear();
        Response.ClearHeaders();
        Response.AppendHeader("Content-disposition", String.Format("attachment; filename=\"{0}\"", System.IO.Path.GetFileName(sfilename)));
        Response.AppendHeader("Content-Type", "binary/octet-stream");
        Response.AppendHeader("Content-length", fileBytes.Length.ToString());
        Response.BinaryWrite(fileBytes);
        if (Response.IsClientConnected)
            Response.Flush();
    }
    catch (Exception ex)
    {
        DemoDAL.ErrorLog("StreamFileToBrowser: " + ex.Message);
    }
}
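Profiling may well show that most of the 20 minutes is spent generating the workbook in CreateReport rather than in the download itself. If the download step is the problem, one option is to skip the byte[] buffering entirely and let IIS stream the finished file from disk. A sketch only, assuming newFileName still points at the generated .xlsx:

// Sketch: send the finished .xlsx directly from disk instead of buffering it in a byte array.
Response.Clear();
Response.ContentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
Response.AppendHeader("Content-Disposition",
    String.Format("attachment; filename=\"{0}\"", Path.GetFileName(newFileName)));
Response.TransmitFile(newFileName);   // streams the file without loading it all into memory
Response.Flush();
HttpContext.Current.ApplicationInstance.CompleteRequest();   // avoids Response.End()'s ThreadAbortException

Note that File.Delete(newFileName) should then only run after the response has been flushed.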
I have an ASP.NET application with a JS file and an ASHX handler. On a download button click I call the handler via AJAX, retrieve SQL table data as a JSON-formatted string/DataTable, and try to export that JSON string/DataTable to an Excel/CSV file and download it. Please help me find a solution. (I need a solution that can export and download a large amount of data.)
I tried the code below, but it's not downloading the Excel file.
public void ProcessRequest(HttpContext context)
{
    context.Response.AddHeader("content-disposition", "attachment; filename=FileName.xls");
    context.Response.ContentType = "application/csv";
    HttpResponse response = context.Response;
    string exportContent = ExportToSpreadsheet(JsonStringToDataTable(GetDataFromTable()), "excelfilename");
    response.Write(exportContent);
    context.Response.End();
}
public DataTable JsonStringToDataTable(string jsonString)
{
    DataTable dt = new DataTable();
    string[] jsonStringArray = Regex.Split(jsonString.Replace("[", "").Replace("]", ""), "},{");
    List<string> ColumnsName = new List<string>();
    foreach (string jSA in jsonStringArray)
    {
        string[] jsonStringData = Regex.Split(jSA.Replace("{", "").Replace("}", ""), ",");
        foreach (string ColumnsNameData in jsonStringData)
        {
            try
            {
                int idx = ColumnsNameData.IndexOf(":");
                string ColumnsNameString = ColumnsNameData.Substring(0, idx - 1).Replace("\"", "");
                if (!ColumnsName.Contains(ColumnsNameString))
                {
                    ColumnsName.Add(ColumnsNameString);
                }
            }
            catch (Exception ex)
            {
                //throw new Exception(string.Format(ex.Message + "Error Parsing Column Name : {0}", ColumnsNameData));
                throw ex;
            }
        }
        break;
    }
    foreach (string AddColumnName in ColumnsName)
    {
        dt.Columns.Add(AddColumnName);
    }
    foreach (string jSA in jsonStringArray)
    {
        string[] RowData = Regex.Split(jSA.Replace("{", "").Replace("}", ""), ",");
        DataRow nr = dt.NewRow();
        foreach (string rowData in RowData)
        {
            try
            {
                int idx = rowData.IndexOf(":");
                string RowColumns = rowData.Substring(0, idx - 1).Replace("\"", "");
                string RowDataString = rowData.Substring(idx + 1).Replace("\"", "");
                nr[RowColumns] = RowDataString;
            }
            catch (Exception ex)
            {
                continue;
            }
        }
        dt.Rows.Add(nr);
    }
    return dt;
}
private static string GetDataFromTable()
{
    string returnValue = string.Empty;
    var serializer = new JavaScriptSerializer { MaxJsonLength = Int32.MaxValue };
    try
    {
        var result = //get data from sql table;
        returnValue = serializer.Serialize(result);
    }
    catch (Exception e)
    {
        returnValue = serializer.Serialize(e.Message);
    }
    return returnValue;
}
public string ExportToSpreadsheet(DataTable table, string name)
{
    string res = string.Empty;
    try
    {
        //var resp = Response;
        System.Web.HttpResponse resp = System.Web.HttpContext.Current.Response;
        resp.Clear();
        if (table != null)
        {
            foreach (DataColumn column in table.Columns)
            {
                resp.Write(column.ColumnName + ",");
            }
        }
        resp.Write(Environment.NewLine);
        if (table != null)
        {
            foreach (DataRow row in table.Rows)
            {
                for (int i = 0; i < table.Columns.Count; i++)
                {
                    resp.Write(row[i].ToString().Replace(",", string.Empty) + ",");
                }
                resp.Write(Environment.NewLine);
            }
        }
        res = "successfully downloaded";
        resp.ContentType = "text/csv";
        resp.AppendHeader("Content-Disposition", "attachment; filename=" + name + ".csv");
        // resp.End();
    }
    catch (Exception ex)
    {
        res = ex.Message;
    }
    return res;
}
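As an aside, splitting the JSON with Regex breaks as soon as a value contains a comma, brace, or bracket. Since JavaScriptSerializer is already in use above, a deserializer-based sketch (assuming the JSON is a flat array of objects that all share the same properties) could replace the string splitting:

// Sketch: convert a flat JSON array of objects to a DataTable without regex parsing.
// Requires: using System; using System.Data; using System.Collections.Generic; using System.Web.Script.Serialization;
public DataTable JsonToDataTable(string jsonString)
{
    var serializer = new JavaScriptSerializer { MaxJsonLength = Int32.MaxValue };
    List<Dictionary<string, object>> rows = serializer.Deserialize<List<Dictionary<string, object>>>(jsonString);

    DataTable dt = new DataTable();
    if (rows == null || rows.Count == 0)
        return dt;

    // Column names come from the keys of the first object.
    foreach (string key in rows[0].Keys)
        dt.Columns.Add(key);

    foreach (Dictionary<string, object> row in rows)
    {
        DataRow dr = dt.NewRow();
        foreach (KeyValuePair<string, object> kvp in row)
            dr[kvp.Key] = kvp.Value ?? DBNull.Value;
        dt.Rows.Add(dr);
    }
    return dt;
}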
Start using a specialized library like EPPlus. It will create real Excel files.
private void exportToExcel(DataTable dataTable)
{
    using (ExcelPackage excelPackage = new ExcelPackage())
    {
        //create the worksheet
        ExcelWorksheet worksheet = excelPackage.Workbook.Worksheets.Add("Sheet 1");
        //load the datatable into the sheet, with headers
        worksheet.Cells["A1"].LoadFromDataTable(dataTable, true);
        //send the file to the browser
        byte[] bin = excelPackage.GetAsByteArray();
        Response.ClearHeaders();
        Response.Clear();
        Response.Buffer = true;
        Response.ContentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
        Response.AddHeader("content-length", bin.Length.ToString());
        Response.AddHeader("content-disposition", "attachment; filename=\"ExcelDemo.xlsx\"");
        Response.OutputStream.Write(bin, 0, bin.Length);
        Response.Flush();
        HttpContext.Current.ApplicationInstance.CompleteRequest();
    }
}
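Since the question uses an .ashx handler rather than a page, roughly the same approach can be written against the handler's context. A sketch, assuming EPPlus is referenced and the question's existing helpers supply the DataTable:

// Sketch: EPPlus export inside an IHttpHandler (.ashx), writing the workbook straight to the response.
public void ProcessRequest(HttpContext context)
{
    DataTable dt = JsonStringToDataTable(GetDataFromTable());   // helpers from the question
    using (ExcelPackage package = new ExcelPackage())
    {
        ExcelWorksheet ws = package.Workbook.Worksheets.Add("Sheet 1");
        ws.Cells["A1"].LoadFromDataTable(dt, true);

        context.Response.Clear();
        context.Response.ContentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet";
        context.Response.AddHeader("content-disposition", "attachment; filename=\"Report.xlsx\"");
        package.SaveAs(context.Response.OutputStream);   // no temporary file or extra byte[] copy
    }
    context.Response.End();
}

One more caveat: a file returned to an AJAX call will not open a save dialog by itself; the download button usually has to navigate to the handler URL (for example by setting window.location) so the browser treats the response as a download.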
I have been working on a code snippet today. Part of the code counts the number of words in an uploaded file. I am using StreamReader to do this, but it throws a DirectoryNotFoundException. Here is the code for the event:
protected void Button1_Click(object sender, EventArgs e)
{
    string filename = string.Empty;
    string FilePath = ConfigurationManager.AppSettings["FilePath"].ToString();
    if (FileUpload1.HasFile)
    {
        string[] Exe = { ".txt" };
        string FileExt = System.IO.Path.GetExtension(FileUpload1.PostedFile.FileName);
        bool isValidFile = Exe.Contains(FileExt);
        if (isValidFile)
        {
            int FileSize = FileUpload1.PostedFile.ContentLength;
            if (FileSize <= 102400)
            {
                filename = Path.GetFileName(FileUpload1.FileName);
                FileUpload1.SaveAs(Server.MapPath(FilePath) + filename);
                StreamReader sr = new StreamReader(FilePath + filename);
                //The error shows up here, and I have tried to use FilePath as the single parameter too
                int counter = 0;
                string delim = " ,.?!";
                string[] fields = null;
                string line = null;
                while (!sr.EndOfStream)
                {
                    line = sr.ReadLine(); //each time you read a line, split it into words
                    line = line.Trim();
                    fields = line.Split(delim.ToCharArray(), StringSplitOptions.RemoveEmptyEntries);
                    counter += fields.Length; //and just add how many of them there are
                }
                sr.Close();
                lblcount.Text = counter.ToString();
                lblMsg.Text = "File uploaded successfully!";
            }
            else
            {
                lblMsg.Text = "File size allowed up to 100 KB!";
            }
        }
        else
        {
            lblMsg.Text = "Please upload a text file!";
        }
    }
    else
    {
        lblMsg.Text = "Please upload a file!";
    }
}
}
Can this be sorted out?
Thanks in advance!
Use Path.Combine to build paths:
string path = Path.Combine(Server.MapPath(FilePath), filename);
FileUpload1.SaveAs(path);
using(StreamReader sr = new StreamReader(path))
{
// ...
}
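For example (illustrative values only), if the configured folder has no trailing separator, plain concatenation glues the folder and file names together, while Path.Combine inserts the separator:

// Illustrative only: why Path.Combine is safer than string concatenation for paths.
string folder = @"C:\inetpub\wwwroot\Uploads";        // hypothetical result of Server.MapPath(FilePath)
string file = "words.txt";

string concatenated = folder + file;                  // "C:\inetpub\wwwroot\Uploadswords.txt"  (wrong path)
string combined = Path.Combine(folder, file);         // "C:\inetpub\wwwroot\Uploads\words.txt" (correct)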
My CSV code is:
public void CreateCSVFile(DataTable dt, string strFilePath)
{
    StreamWriter sw = new StreamWriter(strFilePath, false);
    int iColCount = dt.Columns.Count;
    for (int i = 0; i < iColCount; i++)
    {
        sw.Write(dt.Columns[i]);
        if (i < iColCount - 1)
        {
            sw.Write(",");
        }
    }
    sw.Write(sw.NewLine);
    // Now write all the rows.
    foreach (DataRow dr in dt.Rows)
    {
        for (int i = 0; i < iColCount; i++)
        {
            if (!Convert.IsDBNull(dr[i]))
            {
                string email = dr[i].ToString();
                bool result = IsEmail(email);
                if (result == true)
                    sw.Write(dr[i].ToString());
            }
            //if (i < iColCount - 1)
            //{
            //    sw.Write(" , ");
            //}
        }
        sw.Write(sw.NewLine);
    }
    sw.Close();
}
and in grid_RowCommand() I'm doing this:
if (e.CommandName == "cmdCSV")
{
    DataTable dtCSV = new DataTable();
    dtCSV = ob.TotalRecord(TableField, TableName);
    CreateCSVFile(dtCSV, "c:\\csv file/csv " + TableName + ".csv");
    lblMsg.Visible = true;
    lblMsg.Text = "CSV File Successfully created in C.";
    lblMsg.ForeColor = Color.Green;
}
Here CreateCSVFile(dtCSV, "c:\\csv file/csv " + TableName + ".csv"); always saves the CSV file to C: by default. I want the user to download the CSV file to the location where they want to save it instead. How can I do this? Please guide me.
Thanks in advance.
Try this:
Response.ContentType = "application/ms-excel";
Response.AddHeader("content-disposition", "attachment; filename=XYZ.csv");
string newpath2 = System.Web.HttpContext.Current.Server.MapPath("~/downloadfile/XYZ.csv");
FileStream sourceFile = new FileStream(newpath2, FileMode.Open);
long FileSize;
FileSize = sourceFile.Length;
byte[] getContent = new byte[(int)FileSize];
sourceFile.Read(getContent, 0, (int)sourceFile.Length);
sourceFile.Close();
Response.BinaryWrite(getContent);   // actually send the bytes to the browser
Response.End();
OR
string filePath = Server.MapPath("~/files/myFileName.csv");
System.IO.FileInfo fileInfo = new System.IO.FileInfo(filePath);
Response.ContentType = "application/octet-stream";
Response.AddHeader("Content-Disposition", string.Format("attachment; filename=\"{0}\"", fileInfo.Name));
Response.AddHeader("Content-Length", fileInfo.Length.ToString());
Response.WriteFile(filePath);
Response.End();
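If the real goal is to let the user choose where the file ends up, the usual pattern is not to save to a fixed path on the server at all, but to stream the CSV to the browser with a Content-Disposition header; the browser then shows its own save dialog. A sketch that reuses the column/row loop from CreateCSVFile (the IsEmail filter is omitted for brevity):

// Sketch: build the CSV in memory and send it to the browser instead of writing to C:\.
// Requires: using System.Data; using System.Text;
public void DownloadCSV(DataTable dt, string fileName)
{
    StringBuilder sb = new StringBuilder();
    for (int i = 0; i < dt.Columns.Count; i++)
    {
        sb.Append(dt.Columns[i].ColumnName);
        if (i < dt.Columns.Count - 1) sb.Append(",");
    }
    sb.Append(Environment.NewLine);
    foreach (DataRow dr in dt.Rows)
    {
        for (int i = 0; i < dt.Columns.Count; i++)
        {
            if (!Convert.IsDBNull(dr[i]))
                sb.Append(dr[i].ToString().Replace(",", string.Empty));
            if (i < dt.Columns.Count - 1) sb.Append(",");
        }
        sb.Append(Environment.NewLine);
    }

    Response.Clear();
    Response.ContentType = "text/csv";
    Response.AddHeader("content-disposition", "attachment; filename=" + fileName + ".csv");
    Response.Write(sb.ToString());
    Response.End();
}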
Is there any way to convert a PDF file to a DataTable? The PDF file consists mainly of tables. Any help will be highly appreciated.
using iTextSharp.text;
using iTextSharp.text.pdf;
using iTextSharp.text.pdf.parser;
public DataTable ImportPDF(string Filename)
{
    string strText = string.Empty;
    List<string[]> list = new List<string[]>();
    string[] PdfData = null;
    try
    {
        PdfReader reader = new PdfReader((string)Filename);
        for (int page = 1; page <= reader.NumberOfPages; page++)
        {
            ITextExtractionStrategy its = new iTextSharp.text.pdf.parser.LocationTextExtractionStrategy();
            String cipherText = PdfTextExtractor.GetTextFromPage(reader, page, its);
            cipherText = Encoding.UTF8.GetString(ASCIIEncoding.Convert(Encoding.Default, Encoding.UTF8, Encoding.Default.GetBytes(cipherText)));
            strText = strText + "\n" + cipherText;
            PdfData = strText.Split('\n');
        }
        reader.Close();
    }
    catch (Exception ex)
    {
    }
    List<string> temp = PdfData.ToList();
    temp.RemoveAt(0);
    list = temp.ConvertAll<string[]>(x => x.Split(' ').ToArray());
    List<string> columns = list.FirstOrDefault().ToList();
    DataTable dtTemp = new DataTable();
    columns.All(x => { dtTemp.Columns.Add(new DataColumn(x)); return true; });
    list.All(x => { dtTemp.Rows.Add(dtTemp.NewRow().ItemArray = x); return true; });
    return dtTemp;
}
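A usage sketch (the path and grid are placeholders; the split on spaces above assumes individual cell values contain no spaces):

// Sketch: call the extraction method and bind the result to a grid.
DataTable pdfTable = ImportPDF(Server.MapPath("~/App_Data/report.pdf"));   // hypothetical file
GridView1.DataSource = pdfTable;                                           // hypothetical GridView
GridView1.DataBind();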
If the PDF contains marked content (you can see how to find this in my blog article http://www.jpedal.org/PDFblog/2010/09/the-easy-way-to-discover-if-a-pdf-file-contains-structured-content/) you can extract it from the PDF file. Otherwise you will need to extract the text and try to guess the structure.