Create writeLength function in LibBytes
parent d131c39e46
commit 9a3a302754
@@ -616,4 +616,18 @@ library LibBytes {
             sourceLen
         );
     }
+
+    /// @dev Writes a new length to a byte array.
+    ///      Decreasing length will lead to removing the corresponding lower order bytes from the byte array.
+    ///      Increasing length may lead to appending adjacent in-memory bytes to the end of the byte array.
+    /// @param b Bytes array to write new length to.
+    /// @param length New length of byte array.
+    function writeLength(bytes memory b, uint256 length)
+        internal
+        pure
+    {
+        assembly {
+            mstore(b, length)
+        }
+    }
 }
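Note: a minimal usage sketch, not part of this commit — the contract name, import path, and pragma below are assumed. It shows the safe direction of the new primitive, shrinking an array in place; growing an array this way is left to callers that control the adjacent memory, as the NatSpec above warns.

pragma solidity ^0.5.9; // compiler version assumed

import "./LibBytes.sol"; // import path assumed

contract WriteLengthUsageSketch {
    using LibBytes for bytes;

    /// @dev Truncates `b` in place to `newLength` bytes.
    ///      Only shrinking is allowed here, because growing via writeLength
    ///      would pull in whatever bytes happen to follow `b` in memory.
    function truncate(bytes memory b, uint256 newLength)
        internal
        pure
        returns (bytes memory)
    {
        require(newLength <= b.length, "TRUNCATE_ONLY_SHRINKS");
        b.writeLength(newLength); // overwrites the 32-byte length word stored at `b`
        return b;
    }
}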
@@ -303,4 +303,26 @@ contract TestLibBytes {
         result = LibBytes.sliceDestructive(b, from, to);
         return (result, b);
     }
+
+    /// @dev Returns a byte array with an updated length.
+    /// @dev Writes a new length to a byte array.
+    ///      Decreasing length will lead to removing the corresponding lower order bytes from the byte array.
+    ///      Increasing length may lead to appending adjacent in-memory bytes to the end of the byte array.
+    /// @param b Bytes array to write new length to.
+    /// @param length New length of byte array.
+    /// @param extraBytes Bytes that are appended to end of b in memory.
+    function publicWriteLength(
+        bytes memory b,
+        uint256 length,
+        bytes memory extraBytes
+    )
+        public
+        pure
+        returns (bytes memory)
+    {
+        uint256 bEnd = b.contentAddress() + b.length;
+        LibBytes.memCopy(bEnd, extraBytes.contentAddress(), extraBytes.length);
+        b.writeLength(length);
+        return b;
+    }
 }
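Side note on the test wrapper above: `b.contentAddress() + b.length` is the first memory address past `b`'s contents, which is why copying `extraBytes` there makes the "adjacent in-memory bytes" deterministic before the length is rewritten. A sketch of that address arithmetic, with a hypothetical helper name that is not part of LibBytes:

pragma solidity ^0.5.9; // compiler version assumed

library MemoryLayoutSketch {
    // Sketch only: a bytes array in memory is a 32-byte length word followed by
    // the contents, so the end-of-contents pointer used by publicWriteLength is:
    function endOfContents(bytes memory b) internal pure returns (uint256 bEnd) {
        assembly {
            // add(b, 32) -> contentAddress(b), i.e. where the contents start
            // mload(b)   -> b.length
            bEnd := add(add(b, 32), mload(b))
        }
    }
}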
@@ -668,7 +668,7 @@ describe('LibBytes', () => {
     describe('writeBytesWithLength', () => {
         it('should successfully write short, nested array of bytes when it takes up the whole array', async () => {
             const testBytesOffset = new BigNumber(0);
-            const emptyByteArray = ethUtil.bufferToHex(new Buffer(shortTestBytesAsBuffer.byteLength));
+            const emptyByteArray = ethUtil.bufferToHex(ethUtil.toBuffer(shortTestBytesAsBuffer.byteLength));
             const bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
                 emptyByteArray,
                 testBytesOffset,
@@ -683,7 +683,7 @@ describe('LibBytes', () => {
             const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
             const prefixOffset = new BigNumber(0);
             const emptyByteArray = ethUtil.bufferToHex(
-                new Buffer(prefixDataAsBuffer.byteLength + shortTestBytesAsBuffer.byteLength),
+                ethUtil.toBuffer(prefixDataAsBuffer.byteLength + shortTestBytesAsBuffer.byteLength),
             );
             let bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
                 emptyByteArray,
@@ -703,7 +703,7 @@ describe('LibBytes', () => {
         });
         it('should successfully write a nested array of bytes - one word in length - when it takes up the whole array', async () => {
             const testBytesOffset = new BigNumber(0);
-            const emptyByteArray = ethUtil.bufferToHex(new Buffer(wordOfTestBytesAsBuffer.byteLength));
+            const emptyByteArray = ethUtil.bufferToHex(ethUtil.toBuffer(wordOfTestBytesAsBuffer.byteLength));
             const bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
                 emptyByteArray,
                 testBytesOffset,
@@ -718,7 +718,7 @@ describe('LibBytes', () => {
             const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
             const prefixOffset = new BigNumber(0);
             const emptyByteArray = ethUtil.bufferToHex(
-                new Buffer(prefixDataAsBuffer.byteLength + wordOfTestBytesAsBuffer.byteLength),
+                ethUtil.toBuffer(prefixDataAsBuffer.byteLength + wordOfTestBytesAsBuffer.byteLength),
             );
             let bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
                 emptyByteArray,
@@ -738,7 +738,7 @@ describe('LibBytes', () => {
         });
         it('should successfully write a long, nested bytes when it takes up the whole array', async () => {
             const testBytesOffset = new BigNumber(0);
-            const emptyByteArray = ethUtil.bufferToHex(new Buffer(longTestBytesAsBuffer.byteLength));
+            const emptyByteArray = ethUtil.bufferToHex(ethUtil.toBuffer(longTestBytesAsBuffer.byteLength));
             const bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
                 emptyByteArray,
                 testBytesOffset,
@@ -753,7 +753,7 @@ describe('LibBytes', () => {
             const prefixDataAsBuffer = ethUtil.toBuffer(prefixData);
             const prefixOffset = new BigNumber(0);
             const emptyByteArray = ethUtil.bufferToHex(
-                new Buffer(prefixDataAsBuffer.byteLength + longTestBytesAsBuffer.byteLength),
+                ethUtil.toBuffer(prefixDataAsBuffer.byteLength + longTestBytesAsBuffer.byteLength),
             );
             let bytesWritten = await libBytes.publicWriteBytesWithLength.callAsync(
                 emptyByteArray,
@@ -769,7 +769,7 @@ describe('LibBytes', () => {
         });
         it('should fail if the byte array is too short to hold the length of a nested byte array', async () => {
             const offset = new BigNumber(0);
-            const emptyByteArray = ethUtil.bufferToHex(new Buffer(1));
+            const emptyByteArray = ethUtil.bufferToHex(ethUtil.toBuffer(1));
             const inputLen = new BigNumber((longData.length - 2) / 2);
             const expectedError = new LibBytesRevertErrors.InvalidByteOperationError(
                 LibBytesRevertErrors.InvalidByteOperationErrorCodes.LengthGreaterThanOrEqualsNestedBytesLengthRequired,
@@ -781,7 +781,7 @@ describe('LibBytes', () => {
             ).to.revertWith(expectedError);
         });
         it('should fail if the length between the offset and end of the byte array is too short to hold the length of a nested byte array', async () => {
-            const emptyByteArray = ethUtil.bufferToHex(new Buffer(shortTestBytesAsBuffer.byteLength));
+            const emptyByteArray = ethUtil.bufferToHex(ethUtil.toBuffer(shortTestBytesAsBuffer.byteLength));
             const badOffset = new BigNumber(ethUtil.toBuffer(shortTestBytesAsBuffer).byteLength);
             const inputLen = new BigNumber((shortData.length - 2) / 2);
             const expectedError = new LibBytesRevertErrors.InvalidByteOperationError(
@@ -1074,5 +1074,63 @@ describe('LibBytes', () => {
             expect(result).to.eq(byteArrayLongerThan32Bytes);
         });
     });
+
+    describe('writeLength', () => {
+        it('should return a null byte array if length is set to 0', async () => {
+            const result = await libBytes.publicWriteLength.callAsync(
+                byteArrayLongerThan32Bytes,
+                constants.ZERO_AMOUNT,
+                constants.NULL_BYTES,
+            );
+            expect(result).to.eq(constants.NULL_BYTES);
+        });
+        it('should return the same byte array if length is unchanged', async () => {
+            const byteLen = (byteArrayLongerThan32Bytes.length - 2) / 2;
+            const result = await libBytes.publicWriteLength.callAsync(
+                byteArrayLongerThan32Bytes,
+                new BigNumber(byteLen),
+                constants.NULL_BYTES,
+            );
+            expect(result).to.eq(byteArrayLongerThan32Bytes);
+        });
+        it('should shave off lower order bytes if new length is less than original', async () => {
+            const byteLen = (byteArrayLongerThan32Bytes.length - 2) / 2;
+            const newLen = new BigNumber(byteLen).dividedToIntegerBy(2);
+            const result = await libBytes.publicWriteLength.callAsync(
+                byteArrayLongerThan32Bytes,
+                newLen,
+                constants.NULL_BYTES,
+            );
+            expect(result).to.eq(
+                byteArrayLongerThan32Bytes.slice(
+                    0,
+                    newLen
+                        .multipliedBy(2)
+                        .plus(2)
+                        .toNumber(),
+                ),
+            );
+        });
+        it("should right pad with 0's if new length is greater than original and no extra bytes are appended", async () => {
+            const byteLen = (byteArrayLongerThan32Bytes.length - 2) / 2;
+            const newLen = new BigNumber(byteLen).multipliedBy(2);
+            const result = await libBytes.publicWriteLength.callAsync(
+                byteArrayLongerThan32Bytes,
+                newLen,
+                constants.NULL_BYTES,
+            );
+            expect(result).to.eq(`${byteArrayLongerThan32Bytes}${'0'.repeat(byteArrayLongerThan32Bytes.length - 2)}`);
+        });
+        it('should right pad with extra bytes if specified', async () => {
+            const byteLen = (byteArrayLongerThan32Bytes.length - 2) / 2;
+            const newLen = new BigNumber(byteLen).multipliedBy(2);
+            const result = await libBytes.publicWriteLength.callAsync(
+                byteArrayLongerThan32Bytes,
+                newLen,
+                byteArrayLongerThan32Bytes,
+            );
+            expect(result).to.eq(`${byteArrayLongerThan32Bytes}${byteArrayLongerThan32Bytes.slice(2)}`);
+        });
+    });
 });
 // tslint:disable:max-file-line-count