Remove overall file hash checking for now as it interferes with large files

This commit is contained in:
amcmanu3 2024-07-08 16:27:06 -04:00
parent d6e00edf4a
commit b0a38d1249
2 changed files with 1 addition and 40 deletions

View File

@ -104,7 +104,6 @@ class ApiFilesUploadHandler(BaseApiHandler):
}, },
) )
# Get the headers from the request # Get the headers from the request
self.file_hash = self.request.headers.get("fileHash", 0)
self.chunk_hash = self.request.headers.get("chunkHash", 0) self.chunk_hash = self.request.headers.get("chunkHash", 0)
self.file_id = self.request.headers.get("fileId") self.file_id = self.request.headers.get("fileId")
self.chunked = self.request.headers.get("chunked", False) self.chunked = self.request.headers.get("chunked", False)
@ -190,27 +189,6 @@ class ApiFilesUploadHandler(BaseApiHandler):
calculated_hash = self.file_helper.calculate_file_hash( calculated_hash = self.file_helper.calculate_file_hash(
os.path.join(self.upload_dir, self.filename) os.path.join(self.upload_dir, self.filename)
) )
if calculated_hash != self.file_hash:
# If the hash is bad we'll delete the malformed file and send
# a warning
os.remove(os.path.join(self.upload_dir, self.filename))
logger.error(
f"File upload failed. Filename: {self.filename}"
f"Type: {u_type} Error: INVALID HASH"
)
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID HASH",
"data": {
"message": "Hash recieved does not"
" match reported sent hash."
f"Recieved: {calculated_hash} "
f"Expected: {self.file_hash}",
},
},
)
logger.info( logger.info(
f"File upload completed. Filename: {self.filename} Type: {u_type}" f"File upload completed. Filename: {self.filename} Type: {u_type}"
) )
@ -303,20 +281,6 @@ class ApiFilesUploadHandler(BaseApiHandler):
with open(chunk_file, "rb") as infile: with open(chunk_file, "rb") as infile:
outfile.write(infile.read()) outfile.write(infile.read())
os.remove(chunk_file) os.remove(chunk_file)
if self.file_helper.calculate_file_hash(file_path) != self.file_hash:
os.remove(file_path)
return self.finish_json(
400,
{
"status": "error",
"error": "INVALID HASH",
"data": {
"message": "Hash recieved does not"
" match reported sent hash.",
"chunk_id": self.file_id,
},
},
)
logger.info( logger.info(
f"File upload completed. Filename: {self.filename}" f"File upload completed. Filename: {self.filename}"
f" Path: {file_path} Type: {u_type}" f" Path: {file_path} Type: {u_type}"

View File

@ -23,9 +23,8 @@ async function uploadFile(type, file = null, path = null, file_num = 0, _onProgr
url = `/api/v2/servers/import/upload/` url = `/api/v2/servers/import/upload/`
} }
console.log(url) console.log(url)
const chunkSize = 1024 * 1024; // 1MB const chunkSize = 1024 * 1024 * 10; // 10MB
const totalChunks = Math.ceil(file.size / chunkSize); const totalChunks = Math.ceil(file.size / chunkSize);
const file_hash = await calculateFileHash(file);
const uploadPromises = []; const uploadPromises = [];
let errors = []; // Array to store errors let errors = []; // Array to store errors
@ -35,7 +34,6 @@ async function uploadFile(type, file = null, path = null, file_num = 0, _onProgr
headers: { headers: {
'X-XSRFToken': token, 'X-XSRFToken': token,
'chunked': true, 'chunked': true,
'fileHash': file_hash,
'fileSize': file.size, 'fileSize': file.size,
'type': type, 'type': type,
'totalChunks': totalChunks, 'totalChunks': totalChunks,
@ -70,7 +68,6 @@ async function uploadFile(type, file = null, path = null, file_num = 0, _onProgr
'Content-Range': `bytes ${start}-${end - 1}/${file.size}`, 'Content-Range': `bytes ${start}-${end - 1}/${file.size}`,
'Content-Length': chunk.size, 'Content-Length': chunk.size,
'fileSize': file.size, 'fileSize': file.size,
'fileHash': file_hash,
'chunkHash': chunk_hash, 'chunkHash': chunk_hash,
'chunked': true, 'chunked': true,
'type': type, 'type': type,