I am developing a web app in jQuery and Java. I have the task of making a report with some database records. The report must be able to be downloaded in CSV, XLS and TXT format. The number of records in the file is variable, but sometimes it is very large — up to ten million records.
I have two questions:
I currently have the implementation of the report in CSV format in jQuery, and it works well; the problem is that when the report has more than 800,000 records, the browser runs out of memory and the export fails with an error. Do you know of a way to fix this?
This is the code:
$("#btnSaveReporte").on(
'click',
function(event) {
getValuesSelected();
var valiFec = validarFechasEnUso();
if (validarFechasEnUso()) {
if (validarCampos()){
// get data report
$.ajax({
type : "POST",
url : "/VentasB/api/getDataReport",
data : {
fechaIni : fechaIni,
fechaFin : fechaFin,
orgVentas : Object.keys(orgVentas).join(',').replace(/'/g, ""),
canalDist : Object.keys(canalDist).join(','),
//canalDist : "'10','11','12'",
sector : Object.keys(sector).join(',').replace(/'/g, ""),
ofiVentas : $.map(ofiVentas, function(obj) { return "'" + obj.value + "'" }).join(','),
},
success : function(result) {
console.log(result);
if(result.data.length > 0){
// CSV
var fileName = "Reporte_Ventas_" + fechaIni
+ "_to_" + fechaFin + ".csv";
exportDataToCSV(result.data, fileName,
"#reportFile");
// trigger de click para descargar automaticamente el reporte
$("#reportFile")[0].click();
}else{
Swal.fire({
icon : 'info',
title : 'Sin resultados',
text : 'no se econtró ningún registro que cumpla con los parametros de busqueda.'
})
}
}
});
}
} else {
// swal error
}
});
// ******************** funcion para descargar csv *********************
// Report columns, in output order. The header row and every data row are
// generated from this single list, so the column count can never drift out
// of sync. (The old code hand-built both, and every data row ended with a
// trailing comma — a phantom 33rd column the 32-column header did not have.)
var CSV_COLUMNS = [
    "bill_stmnt_id", "distributor_id", "bill_type_cd", "bill_stmnt_base_id",
    "eff_dt", "eff_tm", "bill_distrib_channel_cd", "total_charge_amt",
    "total_tax_amt", "subtotal_amt", "discounts", "bill_equipment_deposit",
    "price", "perc_fin", "amt_fin", "bill_rfc_ini", "bill_customer_cd",
    "bill_shop_cd", "bill_shop_name", "bill_shop_id", "bill_org_name",
    "bill_address", "bill_move_type_cd", "bill_payment_condition_cd",
    "region_id", "bill_customer_group_cd", "bill_customer_group_desc",
    "bill_user_id", "bill_sale_force_desc", "bill_sale_reason_cd",
    "bill_rfc_end", "bill_source_type_cd"
];

// Quote one CSV field, doubling embedded double quotes per RFC 4180 so
// values containing '"' (e.g. bill_address) no longer corrupt the row.
// null/undefined become an empty field instead of the literal "undefined".
function csvEscape(value) {
    return '"' + String(value == null ? "" : value).replace(/"/g, '""') + '"';
}

// Build the complete CSV text for the given records. Rows are collected in
// an array and joined once at the end — repeated string += over hundreds of
// thousands of rows creates huge amounts of garbage and contributes to the
// browser running out of memory.
function buildCsv(data) {
    var rows = [CSV_COLUMNS.join(',')];
    for (var i = 0; i < data.length; i++) {
        var record = data[i];
        rows.push(CSV_COLUMNS.map(function (col) {
            return csvEscape(record[col]);
        }).join(','));
    }
    return rows.join('\n') + '\n';
}

// Generate the CSV for `data` and attach it to the anchor `btnContainer`
// (a selector for an <a> element) so clicking it downloads `title`.
function exportDataToCSV(data, title, btnContainer) {
    var csv = buildCsv(data);
    // Deliberate 'false': msSaveBlob crashes in IE 10, IE 11 and Microsoft
    // Edge (MS Edge Issue #10396033). Kept only for completeness; remove
    // the 'false' at your own risk. Note the old code also ran
    // decodeURIComponent(csv) here, which would throw on any literal '%'
    // in the data — the raw text is what must be saved.
    if (false && window.navigator.msSaveBlob) {
        var blob = new Blob([csv], {
            type: 'text/csv;charset=utf-8'
        });
        window.navigator.msSaveBlob(blob, title);
    } else if (window.Blob && window.URL) {
        // Preferred path: HTML5 Blob + object URL (no data-URI size limit).
        var blob = new Blob([csv], {
            type: 'text/csv;charset=utf-8'
        });
        $(btnContainer).attr({
            'download': title,
            'href': URL.createObjectURL(blob)
        });
    } else {
        // Last resort: data URI (subject to browser URL-length limits).
        var csvData = 'data:application/csv;charset=utf-8,'
                + encodeURIComponent(csv);
        $(btnContainer).attr({
            'download': title,
            'href': csvData,
            'target': '_blank'
        });
    }
}
I really appreciate your help.
With a large file, you do not want to build it in memory and then download it, because as you have seen, you will run out of memory. The ideal workflow is to build it as it is being downloaded, so that the portion you need to keep in memory at any time is minimal.
Since you are building a web application in Java, you are probably using a framework, instead of implementing an HTTP server by yourself. The libraries you are using probably have support for serving something big as it is being requested, while the rest of the application can continue working (streaming). The best approach is to use this functionality.
For example, if you were using Spring MVC, I would implement downloading of a big file as follows:
@GetMapping(value="/file/{id}")
public StreamingResponseBody getBigFile(@PathVariable long id) throws IOException {
File f = /* find the file here, for example using id*/;
InputStream in = new BufferedInputStream(new FileInputStream(f));
return new StreamingResponseBody() {
@Override
public void writeTo(OutputStream os) throws IOException {
FileCopyUtils.copy(in, os);
}
};
}
You can go one step further and generate the data on the fly as it is being streamed; that way, you do not have to store the big file locally before you send it on.
Web Workers + fetch + the Streams API may be able to help you stream data into a file without holding it all in memory. I have never tried this, and am happily surprised that it seems to be possible. Beware that not all browsers support the necessary web APIs.
Avoid unnecessary copies, and stream instead of buffering whenever possible. It saves a lot of memory, and also avoids doing work before it will be needed.
The technical post webpages of this site follow the CC BY-SA 4.0 protocol. If you need to reprint, please indicate the site URL or the original address.Any question please contact:yoyou2525@163.com.