Merge pull request #8637 from liranmauda/liran-convert-test-file-writer

CI | Reduce run time | Convert test_file_writer to jest
liranmauda authored Jan 2, 2025
2 parents eec9e18 + 3d7ed57 commit 6a079ed
Showing 3 changed files with 18 additions and 37 deletions.
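The conversion itself is mostly mechanical; the one idiom worth calling out is the per-test timeout. Mocha sets it through this, which forces a regular function callback and the const self = this dance, while jest accepts the timeout as an optional third argument to it(), so the callbacks become plain arrow functions. A before/after sketch of the pattern, drawn from the diff below (RUN_TIMEOUT and file_writer_hashing are as defined in the test file):

// Before (mocha): the per-test timeout comes from `this`, so the callback
// must be a regular function and `const self = this` is needed.
const RUN_TIMEOUT = 10 * 60 * 1000; // 10 minutes, as in the test file

mocha.it('Concurrent FileWriter with hash target', async function() {
    const self = this;
    self.timeout(RUN_TIMEOUT);
    await file_writer_hashing.hash_target();
});

// After (jest): the timeout is the optional third argument of it(),
// so a plain arrow function works and no `this` is involved.
it('Concurrent FileWriter with hash target', async () => {
    await file_writer_hashing.hash_target();
}, RUN_TIMEOUT);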
1 change: 0 additions & 1 deletion src/test/unit_tests/index.js
@@ -57,7 +57,6 @@ require('./test_bucket_chunks_builder');
 require('./test_mirror_writer');
 require('./test_namespace_fs');
 require('./test_ns_list_objects');
-require('./test_file_writer');
 require('./test_namespace_fs_mpu');
 require('./test_nb_native_fs');
 require('./test_s3select');
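This removal (and the matching one in nc_index.js below) is needed because the repo's mocha suites are aggregated through central require lists, whereas jest discovers test files on its own by filename pattern. The deeper ../ prefixes in the converted file's requires indicate it now lives one directory further down. A minimal sketch of how such discovery is typically configured — the directory name and pattern here are illustrative assumptions, not taken from this commit:

// jest.config.js — illustrative only; the repo's real jest config is not
// part of this diff, and the jest_tests/ directory name is an assumption.
module.exports = {
    // Jest finds tests by glob pattern instead of a central require list,
    // so a converted file only needs a matching name such as *.test.js.
    testMatch: ['**/jest_tests/**/*.test.js'],
};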
@@ -1,69 +1,52 @@
 /* Copyright (C) 2020 NooBaa */
-/* eslint-disable no-invalid-this */
 'use strict';
 
-const mocha = require('mocha');
-const config = require('../../../config');
-const file_writer_hashing = require('../../tools/file_writer_hashing');
+const config = require('../../../../config');
+const file_writer_hashing = require('../../../tools/file_writer_hashing');
 const orig_iov_max = config.NSFS_DEFAULT_IOV_MAX;
 
 // On small iov_max tests we need to use a smaller number of parts and chunks to ensure that the test
 // finishes in a reasonable period of time, because we flush at most 1/2 of the buffers at a time.
 const small_iov_num_parts = 20;
 
-
-mocha.describe('FileWriter', function() {
+describe('FileWriter', () => {
     const RUN_TIMEOUT = 10 * 60 * 1000;
 
-    mocha.afterEach(function() {
+    afterEach(() => {
         config.NSFS_DEFAULT_IOV_MAX = orig_iov_max;
     });
 
-    mocha.it('Concurrent FileWriter with hash target', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with hash target', async () => {
         await file_writer_hashing.hash_target();
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target', async () => {
         await file_writer_hashing.file_target();
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with hash target - iov_max=1', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with hash target - iov_max=1', async () => {
         await file_writer_hashing.hash_target(undefined, small_iov_num_parts, 1);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target - iov_max=1', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target - iov_max=1', async () => {
         await file_writer_hashing.file_target(undefined, small_iov_num_parts, 1);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with hash target - iov_max=2', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with hash target - iov_max=2', async () => {
         await file_writer_hashing.hash_target(undefined, small_iov_num_parts, 2);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target - iov_max=2', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target - iov_max=2', async () => {
         await file_writer_hashing.file_target(undefined, small_iov_num_parts, 2);
-    });
+    }, RUN_TIMEOUT);
 
-    mocha.it('Concurrent FileWriter with file target - produce num_chunks > 1024 && total_chunks_size < config.NSFS_BUF_SIZE_L', async function() {
-        const self = this;
-        self.timeout(RUN_TIMEOUT);
+    it('Concurrent FileWriter with file target - produce num_chunks > 1024 && total_chunks_size < config.NSFS_BUF_SIZE_L', async () => {
         // The goal of this test is to produce num_chunks > 1024 && total_chunks_size < config.NSFS_BUF_SIZE_L,
         // so we flush buffers because we reached the max number of buffers, not because we reached the max NSFS buf size.
         // chunk size = 100, num_chunks = (10 * 1024 * 1024) / 100 ≈ 104858, so num_chunks > 1024
         // chunk size = 100, total_chunks_size after 1024 chunks = 100 * 1024 = 102400 < config.NSFS_BUF_SIZE_L
         const chunk_size = 100;
         const parts_s = 50;
         await file_writer_hashing.file_target(chunk_size, parts_s);
-    });
+    }, RUN_TIMEOUT);
 });
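The comment in that last test checks out with quick arithmetic, sketched below. The chunk size and total data volume come from the test's own comment; the exact value of config.NSFS_BUF_SIZE_L is not shown in this diff and is assumed to be several megabytes:

// Verifying the last test's comment. chunk_size and the ~10 MiB total are
// taken from the test; NSFS_BUF_SIZE_L's real value is assumed, not shown here.
const chunk_size = 100;                       // bytes per chunk
const total_bytes = 10 * 1024 * 1024;         // ~10 MiB written overall
const num_chunks = total_bytes / chunk_size;  // 104857.6 -> well over 1024 chunks
const size_at_1024 = 1024 * chunk_size;       // 102400 bytes ≈ 100 KiB
// With a multi-megabyte NSFS_BUF_SIZE_L, ~100 KiB is far below the size
// threshold, so the flush is triggered by the 1024-buffer cap instead.
console.log({ num_chunks, size_at_1024 });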
1 change: 0 additions & 1 deletion src/test/unit_tests/nc_index.js
@@ -7,7 +7,6 @@ coretest.setup();
 
 require('./test_namespace_fs');
 require('./test_ns_list_objects');
-require('./test_file_writer');
 require('./test_namespace_fs_mpu');
 require('./test_nb_native_fs');
 require('./test_nc_nsfs_cli');
