More miscellaneous refactoring

CalDescent 2021-08-14 14:05:33 +01:00
parent 09e783fbf6
commit 8dac3ebf96
4 changed files with 12 additions and 12 deletions

ArbitraryDataFile.java

@@ -119,7 +119,7 @@ public class ArbitraryDataFile {
             return arbitraryDataFile;
         } catch (IOException e) {
-            LOGGER.error("Couldn't compute digest for DataFile");
+            LOGGER.error("Couldn't compute digest for ArbitraryDataFile");
         }
     }
     return null;

@@ -187,7 +187,7 @@ public class ArbitraryDataFile {
     // Validate the file size
     long fileSize = Files.size(path);
     if (fileSize > MAX_FILE_SIZE) {
-        LOGGER.error(String.format("DataFile is too large: %d bytes (max size: %d bytes)", fileSize, MAX_FILE_SIZE));
+        LOGGER.error(String.format("ArbitraryDataFile is too large: %d bytes (max size: %d bytes)", fileSize, MAX_FILE_SIZE));
         return ArbitraryDataFile.ValidationResult.FILE_TOO_LARGE;
     }

@@ -468,7 +468,7 @@ public class ArbitraryDataFile {
             return Crypto.digest(fileContent);
         } catch (IOException e) {
-            LOGGER.error("Couldn't compute digest for DataFile");
+            LOGGER.error("Couldn't compute digest for ArbitraryDataFile");
         }
     }
     return null;

ArbitraryDataReader.java

@@ -166,7 +166,7 @@ public class ArbitraryDataReader {
     private void fetchFromFileHash() {
         // Load data file directly from the hash
         ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash58(resourceId);
-        // Set filePath to the location of the DataFile
+        // Set filePath to the location of the ArbitraryDataFile
         this.filePath = Paths.get(arbitraryDataFile.getFilePath());
     }

@@ -233,7 +233,7 @@ public class ArbitraryDataReader {
     if (!Arrays.equals(arbitraryDataFile.digest(), digest)) {
         throw new IllegalStateException("Unable to validate complete file hash");
     }
-    // Set filePath to the location of the DataFile
+    // Set filePath to the location of the ArbitraryDataFile
     this.filePath = Paths.get(arbitraryDataFile.getFilePath());
 }

@@ -247,7 +247,7 @@ public class ArbitraryDataReader {
     AES.decryptFile("AES", aesKey, this.filePath.toString(), this.unencryptedPath.toString());
     // Replace filePath pointer with the encrypted file path
-    // Don't delete the original DataFile, as this is handled in the cleanup phase
+    // Don't delete the original ArbitraryDataFile, as this is handled in the cleanup phase
     this.filePath = this.unencryptedPath;
 } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException | NoSuchPaddingException
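
Taken together, the three hunks above trace the reader's flow: load the ArbitraryDataFile from its hash, verify the complete-file digest, then decrypt into a separate path. A condensed sketch of that flow, built only from the calls visible in these hunks; the field names (resourceId, aesKey, filePath, unencryptedPath) come from the diff, but the wrapping method itself is an assumption, not the file's actual structure.

    // Sketch only: condensed from the hunks above; not the actual method bodies.
    private void fetchValidateAndDecrypt(byte[] digest) throws Exception {
        // Load data file directly from the hash
        ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash58(resourceId);
        this.filePath = Paths.get(arbitraryDataFile.getFilePath());

        // Validate the complete file hash before decrypting
        if (!Arrays.equals(arbitraryDataFile.digest(), digest)) {
            throw new IllegalStateException("Unable to validate complete file hash");
        }

        // Decrypt into a separate path; the original ArbitraryDataFile is removed later, in the cleanup phase
        AES.decryptFile("AES", aesKey, this.filePath.toString(), this.unencryptedPath.toString());
        this.filePath = this.unencryptedPath;
    }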

ArbitraryDataManager.java

@@ -370,13 +370,13 @@ public class ArbitraryDataManager extends Thread {
     ArbitraryDataFile arbitraryDataFile = ArbitraryDataFile.fromHash(hash);
     if (chunkHashes != null && chunkHashes.length > 0) {
         arbitraryDataFile.addChunkHashes(chunkHashes);
-        for (ArbitraryDataFileChunk dataFileChunk : arbitraryDataFile.getChunks()) {
-            if (dataFileChunk.exists()) {
-                hashes.add(dataFileChunk.getHash());
-                //LOGGER.info("Added hash {}", dataFileChunk.getHash58());
+        for (ArbitraryDataFileChunk chunk : arbitraryDataFile.getChunks()) {
+            if (chunk.exists()) {
+                hashes.add(chunk.getHash());
+                //LOGGER.info("Added hash {}", chunk.getHash58());
             }
             else {
-                LOGGER.info("Couldn't add hash {} because it doesn't exist", dataFileChunk.getHash58());
+                LOGGER.info("Couldn't add hash {} because it doesn't exist", chunk.getHash58());
             }
         }
     }

HSQLDBArbitraryRepository.java

@@ -133,7 +133,7 @@ public class HSQLDBArbitraryRepository implements ArbitraryRepository {
     arbitraryTransactionData.setData(dataHash);
     arbitraryTransactionData.setDataType(DataType.DATA_HASH);
-    // Create DataFile
+    // Create ArbitraryDataFile
     ArbitraryDataFile arbitraryDataFile = new ArbitraryDataFile(rawData);
     // Verify that the data file is valid, and that it matches the expected hash
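
The hunk ends just before the verification it mentions. One possible shape for that check, borrowing the digest()/Arrays.equals() pattern from the ArbitraryDataReader hunk above; the method name and error handling are illustrative assumptions, not the repository's code.

    // Illustrative sketch only: hash check in the style of ArbitraryDataReader above.
    private void verifyDataFileMatchesHash(byte[] rawData, byte[] dataHash) {
        ArbitraryDataFile arbitraryDataFile = new ArbitraryDataFile(rawData);
        if (!Arrays.equals(arbitraryDataFile.digest(), dataHash)) {
            // Hypothetical handling; the real method's error handling isn't shown in this diff
            throw new IllegalStateException("ArbitraryDataFile digest doesn't match the transaction's data hash");
        }
    }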