Hi,
Since SharePoint Online has an approximate 1.5 MB limit per request, I am trying to upload files larger than 1 MB in chunks. Below is the code I am using.
I am getting the error below from newFile.continueUpload(), although chunkStream is neither null nor undefined. Please help.
Value cannot be null.
Parameter name: stream
that.insertDocument = function (file, Id, docType, resolutionId, TicketNumber, handleLoader = true) {
var deferred = $.Deferred();
var newGuid = SP.Guid.newGuid().toString();
var clientContext = SP.ClientContext.get_current();
var web = clientContext.get_web();
clientContext.load(web, 'ServerRelativeUrl');
clientContext.executeQueryAsync(
async function () {
var folderServerRelativeUrl = web.get_serverRelativeUrl() + '/ArcilDocuments';
console.log(folderServerRelativeUrl);
var docLib = web.getFolderByServerRelativeUrl(folderServerRelativeUrl);
folder = docLib;
var minKb = 0.5;
var minChunk = 1024 * 1024 * minKb;
var chunkSize = 1024 * 1024 * minKb;
var fileSize = file.size;
var fileName = file.name;
var names = fileName.split('.');
var ext = names.pop();
var newName = TicketNumber.replace(/\//g, '-') + '-' + names[0] + '-' + new Date().getTime() + '.' + ext;
var fileCreateInfo = new SP.FileCreationInformation();
fileCreateInfo.set_url(newName);
fileCreateInfo.set_overwrite(true);
var chunkIndex = 0;
var offset = 0;
if (chunkSize < fileSize) {
var finalCheck = false;
var chunkReader = new FileReader();
console.log('fileSize', fileSize)
var isLast = false;
//var uploadedChunk = new SP.Base64EncodedByteArray();
var chunkStream = new SP.Base64EncodedByteArray();
chunkReader.onload = async function (evt) {
if (evt.target.error == null) {
var bufferData = new Uint8Array(evt.target.result);
var uploadedChunk = new SP.Base64EncodedByteArray();
for (var i = 0; i < bufferData.length; i++) {
chunkStream.append(bufferData[i]);
}
//var blob = new Blob([bufferData]);
//// Create a file object from the Blob
//var file = new File([blob], newName);
console.log(chunkStream);
if (!isLast) {
if (chunkIndex == 0) {
var parameters = new SP.FileCreationInformation();
parameters.set_url(newName); // Make sure newName is a valid file name
parameters.set_overwrite(true);
parameters.set_content(chunkStream);
var newFile = docLib.get_files().add(parameters);
var listItem = newFile.get_listItemAllFields();
listItem.set_item('Year', new Date().getFullYear());
listItem.set_item('TicketId', Id);
listItem.set_item('DocumentType', docType);
listItem.set_item('UploadedOn', new Date());
listItem.set_item('ResolutionId', resolutionId);
listItem.set_item('DocumentStatus', true);
listItem.update();
clientContext.load(newFile);
console.log('sdsadsfsdfd')
await clientContext.executeQueryAsync(
async function () {
newFile.startUpload(newGuid, offset, chunkStream);
await clientContext.executeQueryAsync(
async function () {
var totalBytesUploaded = chunkStream.get_length();
console.log("Total bytes uploaded: " + totalBytesUploaded);
chunkIndex++;
offset += chunkSize;
if (offset < fileSize) {
if (((fileSize - offset)) <= minChunk) {
isLast = true;
}
readNextChunk();
}
},
function (sender, args) {
console.log("Error fetching bytes uploaded: " + args.get_message());
}
);
},
function (sender, args) { }
);
}
else {
newFile = docLib.get_files().getByUrl(newName);
clientContext.load(newFile);
await clientContext.executeQueryAsync(
async function () {
newFile.continueUpload(newGuid, offset, chunkStream);
clientContext.executeQueryAsync(
function () {
var totalBytesUploaded = chunkStream.get_length();
console.log("Total bytes uploaded continue: " + totalBytesUploaded);
chunkIndex++;
offset += chunkSize;
if (offset < fileSize) {
if (((fileSize - offset)) <= minChunk) {
isLast = true;
}
readNextChunk();
}
},
function (sender, args) {
console.log(chunkStream, offset);
console.log("Error fetching bytes uploaded: " + args.get_message());
}
);
},
function (sender, args) { }
);
}
}
else {
newFile = docLib.get_files().getByUrl(newName);
clientContext.load(newFile);
await clientContext.executeQueryAsync(
async function () {
newFile.finishUpload(newGuid, offset, chunkStream);
await clientContext.executeQueryAsync(
function () {
var totalBytesUploaded = chunkStream.get_length();
console.log("Total bytes uploaded completed: " + totalBytesUploaded);
},
function (sender, args) {
console.log(chunkStream, offset);
console.log("Error fetching bytes uploaded: " + args.get_message());
});
},
function (sender, args) { }
);
}
}
}
var readNextChunk = function () {
var blob = file.slice(offset, offset + minChunk);
chunkReader.readAsArrayBuffer(blob);
//chunkReader.readAsArrayBuffer(file);
};
readNextChunk();
}
else {
var reader = new FileReader();
reader.onloadend = async function () {
var base64Data = reader.result.split(',')[1];
fileCreateInfo.set_content(new SP.Base64EncodedByteArray(base64Data));
// Add the file to the folder
var uploadedFile = folder.get_files().add(fileCreateInfo);
var listItem = uploadedFile.get_listItemAllFields();
listItem.set_item('Year', new Date().getFullYear());
listItem.set_item('TicketId', Id);
listItem.set_item('DocumentType', docType);
listItem.set_item('UploadedOn', new Date());
listItem.set_item('ResolutionId', resolutionId);
listItem.set_item('DocumentStatus', true);
listItem.update();
clientContext.load(uploadedFile);
await clientContext.executeQueryAsync(
function () {
var fileUrl = uploadedFile.get_serverRelativeUrl();
if (handleLoader == true) {
commonHelper.showHideLoader(false);
}
//
deferred.resolve(fileUrl);
},
function (sender, args) {
if (handleLoader == true) {
commonHelper.showHideLoader(false);
}
deferred.resolve('');
}
);
};
reader.readAsDataURL(file);
}
})
return deferred.promise();
};