import { Controller } from "@hotwired/stimulus";

/**
 * Stimulus controller driving multi-file image uploads.
 *
 * Flow per file:
 *   1. POST /sites/:id/uploads/pending            — register size/mime/name, get a GUID
 *   2. POST /sites/:id/uploads/pending/:guid      — stream the file in 2 MB chunks (XHR,
 *                                                   so we get upload progress events)
 *   3. POST /sites/:id/uploads/pending/:guid/finalize — submit the client-side SHA-256
 *
 * Targets:
 *   uploadBtn           — button disabled while uploads run
 *   progressbar         — container revealed when uploading starts
 *   progressbarProgress — inner bar whose width tracks overall percent
 * Values:
 *   siteId (Number)     — site whose upload endpoints are used
 */
export default class UploadController extends Controller {
  static targets = [
    'uploadBtn',
    'progressbar',
    'progressbarProgress',
  ];

  static values = {
    siteId: Number,
  };

  // Action handler: prompt for files, then upload newest-first.
  upload(ev) {
    ev.preventDefault();
    this._promptForUpload((files) => {
      // Newest files first (by lastModified, descending).
      files.sort((a, b) => b.lastModified - a.lastModified);
      this._doUploads(files);
    });
  }

  // Opens the native file picker via a detached <input type=file>.
  // Calls onAccept(files) only when the user picked at least one file.
  _promptForUpload(onAccept) {
    const input = document.createElement('input');
    input.type = 'file';
    input.accept = 'image/*';
    input.multiple = true;
    input.onchange = (e) => {
      const files = Array.from(e.target.files);
      if (files.length > 0) {
        onAccept(files);
      }
    };
    input.click();
  }

  // Uploads each file sequentially, then reloads the page to show results.
  // On failure, re-enables the button so the user can retry instead of
  // leaving the UI permanently disabled (previously the rejection was
  // unhandled and the button stayed disabled forever).
  async _doUploads(files) {
    this.uploadBtnTarget.disabled = true;
    this._showUploadProgressBar();
    try {
      for (let i = 0; i < files.length; i++) {
        await this._doUpload(files[i], i, files.length);
      }
    } catch (err) {
      console.error('Upload failed:', err);
      this.uploadBtnTarget.disabled = false;
      return;
    }
    window.location.reload();
  }

  // Full upload lifecycle for one file: register pending, stream chunks,
  // hash, finalize. Throws on any HTTP failure so _doUploads can recover.
  async _doUpload(file, thisFileIndex, nFiles) {
    console.log(`Uploading ${file.name}: new pending`);

    // Prepare upload of file supplying size and mime-type
    const pendingRes = await fetch(`/sites/${this.siteIdValue}/uploads/pending`, {
      method: 'POST',
      headers: {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        size: file.size,
        mime: file.type,
        name: file.name,
      }),
    });
    if (!pendingRes.ok) {
      throw new Error(`Creating pending upload failed with status ${pendingRes.status}`);
    }
    const newPending = await pendingRes.json();

    // Upload file in 2 MB blocks
    const chunkSize = 2 * 1024 * 1024;
    let offset = 0;
    while (offset < file.size) {
      const chunk = file.slice(offset, offset + chunkSize);
      console.log(`Uploading ${file.name}: uploading part`);
      await this._uploadChunk(`/sites/${this.siteIdValue}/uploads/pending/${newPending.guid}`, chunk, {
        fileName: file.name,
        chunkOffset: offset,
        totalSize: file.size,
        thisFileIndex,
        nFiles,
      });
      offset += chunkSize;
    }

    // Calculate SHA256 hash client-side so the server can verify integrity.
    const hash = await this._calculateSHA256(file);

    // Finalise upload
    console.log(`Uploading ${file.name}: finalise`);
    const finalizeRes = await fetch(`/sites/${this.siteIdValue}/uploads/pending/${newPending.guid}/finalize`, {
      method: 'POST',
      headers: {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({ hash: hash }),
    });
    if (!finalizeRes.ok) {
      throw new Error(`Finalizing upload failed with status ${finalizeRes.status}`);
    }
  }

  // POSTs one chunk via XHR (fetch has no upload-progress events) and
  // updates the overall progress bar across all files.
  // progressInfo: { fileName, chunkOffset, totalSize, thisFileIndex, nFiles }
  _uploadChunk(url, chunk, progressInfo) {
    const { fileName, chunkOffset, totalSize, thisFileIndex, nFiles } = progressInfo;
    return new Promise((resolve, reject) => {
      const xhr = new XMLHttpRequest();
      xhr.upload.addEventListener('progress', (e) => {
        if (e.lengthComputable) {
          const bytesUploaded = chunkOffset + e.loaded;
          const fractionalCompleteOfThisFile = +bytesUploaded / +totalSize;
          const percentComplete = (thisFileIndex + fractionalCompleteOfThisFile) * 100 / nFiles;
          // Blob slices have no .name — report the owning file's name instead.
          console.log(`Uploading ${fileName}: ${percentComplete.toFixed(2)}%`);
          this.progressbarProgressTarget.style.width = `${percentComplete}%`;
        }
      });
      xhr.addEventListener('load', () => {
        if (xhr.status >= 200 && xhr.status < 300) {
          resolve();
        } else {
          reject(new Error(`Upload failed with status ${xhr.status}`));
        }
      });
      xhr.addEventListener('error', () => reject(new Error('Upload failed')));
      xhr.addEventListener('abort', () => reject(new Error('Upload aborted')));
      xhr.open('POST', url);
      xhr.setRequestHeader('Content-Type', 'application/octet-stream');
      xhr.send(chunk);
    });
  }

  // Reveal the progress bar and reset it to 0%.
  _showUploadProgressBar() {
    this.progressbarTarget.classList.remove('d-none');
    this.progressbarProgressTarget.style.width = '0%';
  }

  // Hex-encoded SHA-256 of the whole file via Web Crypto.
  // NOTE: reads the entire file into memory; fine for images.
  async _calculateSHA256(file) {
    const arrayBuffer = await file.arrayBuffer();
    const hashBuffer = await crypto.subtle.digest('SHA-256', arrayBuffer);
    const hashArray = Array.from(new Uint8Array(hashBuffer));
    const hashHex = hashArray.map(b => b.toString(16).padStart(2, '0')).join('');
    return hashHex;
  }
}