diff --git a/src/file/FileAppendTransaction.js b/src/file/FileAppendTransaction.js
index 297f6866b..12af806b3 100644
--- a/src/file/FileAppendTransaction.js
+++ b/src/file/FileAppendTransaction.js
@@ -60,6 +60,7 @@ export default class FileAppendTransaction extends Transaction {
      * @param {Uint8Array | string} [props.contents]
      * @param {number} [props.maxChunks]
      * @param {number} [props.chunkSize]
+     * @param {number} [props.chunkInterval]
      */
     constructor(props = {}) {
         super();
@@ -88,6 +89,12 @@ export default class FileAppendTransaction extends Transaction {
          */
         this._chunkSize = 4096;
 
+        /**
+         * @private
+         * @type {number}
+         */
+        this._chunkInterval = 10;
+
         this._defaultMaxTransactionFee = new Hbar(5);
 
         if (props.fileId != null) {
@@ -106,6 +113,10 @@ export default class FileAppendTransaction extends Transaction {
             this.setChunkSize(props.chunkSize);
         }
 
+        if (props.chunkInterval != null) {
+            this.setChunkInterval(props.chunkInterval);
+        }
+
         /** @type {List} */
         this._transactionIds = new List();
     }
@@ -168,6 +179,19 @@ export default class FileAppendTransaction extends Transaction {
             contents = concat;
         }
 
+        const chunkSize = append.contents?.length || undefined;
+        const maxChunks = bodies.length || undefined;
+        let chunkInterval;
+        if (transactionIds.length > 1) {
+            const firstValidStart = transactionIds[0].validStart;
+            const secondValidStart = transactionIds[1].validStart;
+            if (firstValidStart && secondValidStart) {
+                chunkInterval = secondValidStart.nanos
+                    .sub(firstValidStart.nanos)
+                    .toNumber();
+            }
+        }
+
         return Transaction._fromProtobufTransactions(
             new FileAppendTransaction({
                 fileId:
@@ -178,7 +202,10 @@ export default class FileAppendTransaction extends Transaction {
                           ),
                       )
                     : undefined,
-                contents: contents,
+                contents,
+                chunkSize,
+                maxChunks,
+                chunkInterval,
             }),
             transactions,
             signedTransactions,
@@ -300,6 +327,22 @@ export default class FileAppendTransaction extends Transaction {
         return this;
     }
 
+    /**
+     * @returns {number}
+     */
+    get chunkInterval() {
+        return this._chunkInterval;
+    }
+
+    /**
+     * @param {number} chunkInterval The valid start interval between chunks in nanoseconds
+     * @returns {this}
+     */
+    setChunkInterval(chunkInterval) {
+        this._chunkInterval = chunkInterval;
+        return this;
+    }
+
     /**
      * Freeze this transaction from further modification to prepare for
      * signing or serialization.
@@ -344,7 +387,7 @@ export default class FileAppendTransaction extends Transaction {
                     ).seconds,
                     /** @type {Timestamp} */ (
                         nextTransactionId.validStart
-                    ).nanos.add(1),
+                    ).nanos.add(this._chunkInterval),
                 ),
             );
         }
@@ -465,6 +508,10 @@
      */
     _buildIncompleteTransactions() {
         const dummyAccountId = AccountId.fromString("0.0.0");
+        const accountId = this.transactionId?.accountId || dummyAccountId;
+        const validStart =
+            this.transactionId?.validStart || Timestamp.fromDate(new Date());
+
         if (this._contents == null) {
             throw new Error("contents is not set");
         }
@@ -483,7 +530,10 @@
         this._signedTransactions.clear();
 
         for (let chunk = 0; chunk < this.getRequiredChunks(); chunk++) {
-            let nextTransactionId = TransactionId.generate(dummyAccountId);
+            let nextTransactionId = TransactionId.withValidStart(
+                accountId,
+                validStart.plusNanos(this._chunkInterval * chunk),
+            );
 
             this._transactionIds.push(nextTransactionId);
             this._transactionIds.advance();
diff --git a/test/integration/FileAppendIntegrationTest.js b/test/integration/FileAppendIntegrationTest.js
index a7d5083f5..26f5a665f 100644
--- a/test/integration/FileAppendIntegrationTest.js
+++ b/test/integration/FileAppendIntegrationTest.js
@@ -6,6 +6,8 @@ import {
     FileInfoQuery,
     Hbar,
     Status,
+    Timestamp,
+    TransactionId,
 } from "../../src/exports.js";
 import { bigContents } from "./contents.js";
 import IntegrationTestEnv from "./client/NodeIntegrationTestEnv.js";
@@ -22,6 +24,7 @@ describe("FileAppend", function () {
         newContents = generateUInt8Array(newContentsLength);
         operatorKey = env.operatorKey.publicKey;
     });
+
     it("should be executable", async function () {
         let response = await new FileCreateTransaction()
             .setKeys([operatorKey])
@@ -351,6 +354,96 @@ describe("FileAppend", function () {
         expect(receipt.status).to.be.equal(Status.Success);
     });
 
+    it("should keep transaction id after non-frozen deserialization", async function () {
+        const operatorKey = env.operatorKey.publicKey;
+
+        let response = await new FileCreateTransaction()
+            .setKeys([operatorKey])
+            .setContents(Buffer.from(""))
+            .execute(env.client);
+
+        let { fileId } = await response.getReceipt(env.client);
+
+        const chunkInterval = 230;
+        const validStart = Timestamp.fromDate(new Date());
+
+        const tx = new FileAppendTransaction()
+            .setTransactionId(
+                TransactionId.withValidStart(env.operatorId, validStart),
+            )
+            .setFileId(fileId)
+            .setChunkInterval(chunkInterval)
+            .setChunkSize(1000)
+            .setContents(newContents);
+
+        const txBytes = tx.toBytes();
+        const txFromBytes = FileAppendTransaction.fromBytes(txBytes);
+
+        expect(
+            txFromBytes.transactionId.accountId._toProtobuf(),
+        ).to.be.deep.equal(env.operatorId?._toProtobuf());
+        expect(txFromBytes.transactionId.validStart).to.be.deep.equal(
+            validStart,
+        );
+
+        txFromBytes._transactionIds.list.forEach(
+            (transactionId, index, array) => {
+                if (index > 0) {
+                    const previousTimestamp = array[index - 1].validStart;
+                    const currentTimestamp = transactionId.validStart;
+                    const difference =
+                        currentTimestamp.nanos - previousTimestamp.nanos;
+                    expect(difference).to.be.equal(chunkInterval);
+                }
+            },
+        );
+
+        txFromBytes.freezeWith(env.client);
+        await txFromBytes.sign(env.operatorKey);
+
+        const receipt = await (
+            await txFromBytes.execute(env.client)
+        ).getReceipt(env.client);
+        expect(receipt.status).to.be.equal(Status.Success);
+    });
+
+    it("should keep chunk size, chunk interval and correct max chunks after deserialization", async function () {
+        const operatorKey = env.operatorKey.publicKey;
+        const chunkSize = 1024;
+        const chunkInterval = 230;
+
+        let response = await new FileCreateTransaction()
+            .setKeys([operatorKey])
+            .setContents(Buffer.from(""))
+            .execute(env.client);
+
+        let { fileId } = await response.getReceipt(env.client);
+
+        const tx = new FileAppendTransaction()
+            .setFileId(fileId)
+            .setChunkSize(chunkSize)
+            .setChunkInterval(chunkInterval)
+            .setMaxChunks(99999)
+            .setContents(newContents);
+
+        const txBytes = tx.toBytes();
+        const txFromBytes = FileAppendTransaction.fromBytes(txBytes);
+
+        expect(txFromBytes.chunkSize).to.be.equal(1024);
+        expect(txFromBytes.maxChunks).to.be.equal(
+            txFromBytes.getRequiredChunks(),
+        );
+        expect(txFromBytes.chunkInterval).to.be.equal(230);
+
+        txFromBytes.freezeWith(env.client);
+        await txFromBytes.sign(env.operatorKey);
+
+        const receipt = await (
+            await txFromBytes.execute(env.client)
+        ).getReceipt(env.client);
+        expect(receipt.status).to.be.equal(Status.Success);
+    });
+
     after(async function () {
         await env.close();
     });
diff --git a/test/unit/FileAppendTransaction.js b/test/unit/FileAppendTransaction.js
index 8424256f4..5eae37978 100644
--- a/test/unit/FileAppendTransaction.js
+++ b/test/unit/FileAppendTransaction.js
@@ -11,12 +11,15 @@ import {
 import Long from "long";
 
 describe("FileAppendTransaction", function () {
+    const spenderAccountId1 = new AccountId(7);
+    const fileId = new FileId(8);
+    const nodeAccountId = new AccountId(10, 11, 12);
+    const timestamp1 = new Timestamp(14, 15);
+    const fee = new Hbar(5);
+    const chunkSize = 1000;
+
     it("setChunkSize()", function () {
-        const spenderAccountId1 = new AccountId(7);
-        const fileId = new FileId(8);
-        const nodeAccountId = new AccountId(10, 11, 12);
-        const timestamp1 = new Timestamp(14, 15);
-        const fee = new Hbar(5);
+        const contents = "1".repeat(1000) + "2".repeat(1000) + "3".repeat(1000);
 
         let transaction = new FileAppendTransaction()
             .setTransactionId(
@@ -24,8 +27,8 @@ describe("FileAppendTransaction", function () {
             )
             .setNodeAccountIds([nodeAccountId])
             .setFileId(fileId)
-            .setChunkSize(1000)
-            .setContents("1".repeat(1000) + "2".repeat(1000) + "3".repeat(1000))
+            .setChunkSize(chunkSize)
+            .setContents(contents)
             .freeze();
 
         const transactionId = transaction.transactionId;
@@ -88,4 +91,51 @@
         expect(body.fileAppend.contents.length).to.be.equal(1000);
         expect(body.fileAppend.contents[0]).to.be.equal(51);
     });
+
+    it("setChunkInterval()", function () {
+        const contents = "1".repeat(1000) + "2".repeat(1000) + "3".repeat(1000);
+        const chunkInterval = 200;
+
+        let transaction = new FileAppendTransaction()
+            .setTransactionId(
+                TransactionId.withValidStart(spenderAccountId1, timestamp1),
+            )
+            .setNodeAccountIds([nodeAccountId])
+            .setFileId(fileId)
+            .setChunkSize(chunkSize)
+            .setContents(contents)
+            .setChunkInterval(chunkInterval)
+            .freeze();
+
+        expect(transaction._transactionIds.list.length).to.be.equal(3);
+        const requiredChunks = contents.length / chunkSize;
+
+        let body = transaction._makeTransactionBody(nodeAccountId);
+
+        expect(body.transactionID).to.deep.equal(
+            transaction._transactionIds.list[0]._toProtobuf(),
+        );
+
+        for (let i = 1; i < requiredChunks; i++) {
+            transaction._transactionIds.advance();
+            body = transaction._makeTransactionBody(nodeAccountId);
+            expect(body.transactionID).to.deep.equal(
+                transaction._transactionIds.list[i]._toProtobuf(),
+            );
+
+            expect(
+                transaction._transactionIds.list[i].validStart.nanos.sub(
+                    transaction._transactionIds.list[i - 1].validStart.nanos,
+                ),
+            ).to.deep.equal(Long.fromNumber(chunkInterval));
+        }
+
+        expect(
+            transaction._transactionIds.list[
+                requiredChunks - 1
+            ].validStart.nanos.sub(
+                transaction._transactionIds.list[0].validStart.nanos,
+            ),
+        ).to.deep.equal(Long.fromNumber(chunkInterval * (requiredChunks - 1)));
+    });
 });
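For context, here is a minimal usage sketch of the `setChunkInterval()` API introduced by this diff. The client setup, operator credentials, and file ID are illustrative placeholders, not part of this change; only the `FileAppendTransaction` calls are taken from the code above.

```js
import {
    Client,
    FileAppendTransaction,
    FileId,
    PrivateKey,
} from "@hashgraph/sdk";

async function main() {
    // Placeholder operator credentials -- substitute real values.
    const client = Client.forTestnet().setOperator(
        "0.0.1234",
        PrivateKey.fromString("302e0201..."),
    );

    // A 3 KiB payload with chunkSize = 1024 splits into three chunk
    // transactions. On freeze, each chunk's transaction ID gets a validStart
    // spaced chunkInterval nanoseconds after the previous one (default 10).
    const tx = new FileAppendTransaction()
        .setFileId(FileId.fromString("0.0.5678")) // hypothetical file ID
        .setChunkSize(1024)
        .setChunkInterval(230) // nanoseconds between chunk validStarts
        .setContents("x".repeat(3 * 1024))
        .freezeWith(client);

    const receipt = await (await tx.execute(client)).getReceipt(client);
    console.log(receipt.status.toString());
}

main().catch(console.error);
```

Spacing the validStart timestamps by a configurable interval (rather than the fixed 1 ns increment this diff removes) keeps each chunk's transaction ID unique and deterministic, which is what allows `fromBytes()` above to recover the interval by diffing the validStarts of the first two transaction IDs.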