Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Do not dump Babelfish initialize user #1841

Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 23 additions & 27 deletions .github/composite-actions/dump-restore-util/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,13 @@ inputs:
description: "Logical Babelfish database to dump and restore"
required: false
default: 'null'
dump_data_as:
description: "Dump table data using COPY or INSERT"
required: false
default: 'copy'
dump_format:
description: "Dump format (plain/custom/tar/directory)"
required: true

runs:
using: "composite"
Expand All @@ -44,14 +51,22 @@ runs:
mkdir -p upgrade
cd upgrade
export PGPASSWORD=12345678

if [[ '${{ inputs.dump_data_as }}' == 'inserts' ]];then
export DUMP_OPTS='--column-inserts'
else
export DUMP_OPTS=''
fi
export DUMP_OPTS="$DUMP_OPTS --format=${{ inputs.dump_format }}"

if [[ '${{ inputs.logical_database }}' == 'null' ]];then
echo 'Starting to dump whole Babelfish physical database'
~/${{ inputs.pg_new_dir }}/bin/pg_dumpall -h localhost --database jdbc_testdb --username jdbc_user --globals-only --quote-all-identifiers --verbose --no-role-passwords -f pg_dump_globals.sql 2>>error.log
~/${{ inputs.pg_new_dir }}/bin/pg_dump -h localhost --username jdbc_user --quote-all-identifiers --verbose --file="pg_dump.sql" --dbname=jdbc_testdb 2>>error.log
~/${{ inputs.pg_new_dir }}/bin/pg_dump -h localhost --username jdbc_user $DUMP_OPTS --quote-all-identifiers --verbose --file="pg_dump.archive" --dbname=jdbc_testdb 2>>error.log
else
echo "Starting to dump Babelfish logical database ${{ inputs.logical_database }}"
~/${{ inputs.pg_new_dir }}/bin/pg_dumpall -h localhost --database jdbc_testdb --username jdbc_user --globals-only --quote-all-identifiers --verbose --no-role-passwords --bbf-database-name='${{ inputs.logical_database }}' -f pg_dump_globals.sql 2>>error.log
~/${{ inputs.pg_new_dir }}/bin/pg_dump -h localhost --username jdbc_user --quote-all-identifiers --verbose --bbf-database-name='${{ inputs.logical_database }}' --file="pg_dump.sql" --dbname=jdbc_testdb 2>>error.log
~/${{ inputs.pg_new_dir }}/bin/pg_dump -h localhost --username jdbc_user $DUMP_OPTS --quote-all-identifiers --verbose --bbf-database-name='${{ inputs.logical_database }}' --file="pg_dump.archive" --dbname=jdbc_testdb 2>>error.log
fi

# Stop old server and start the new.
Expand All @@ -63,35 +78,16 @@ runs:
# Create and initialise Babelfish extensions in the new server to perform restore.
sudo ~/${{ inputs.pg_new_dir }}/bin/psql -d postgres -U runner -v user="jdbc_user" -v db="jdbc_testdb" -v migration_mode=${{inputs.migration_mode}} -f .github/scripts/create_extension.sql
echo 'Restoring from pg_dumpall'
sudo PGPASSWORD=12345678 ~/${{ inputs.pg_new_dir }}/bin/psql -h localhost -d jdbc_testdb -U jdbc_user -f ~/upgrade/pg_dump_globals.sql 2>> ~/upgrade/error.log
sudo PGPASSWORD=12345678 ~/${{ inputs.pg_new_dir }}/bin/psql -h localhost -d jdbc_testdb -U jdbc_user --single-transaction -f ~/upgrade/pg_dump_globals.sql 2>> ~/upgrade/error.log
echo 'Restoring from pg_dump'
sudo PGPASSWORD=12345678 ~/${{ inputs.pg_new_dir }}/bin/psql -h localhost -d jdbc_testdb -U jdbc_user -f ~/upgrade/pg_dump.sql 2>> ~/upgrade/error.log

export PATH=/opt/mssql-tools/bin:$PATH
sqlcmd -S localhost -U jdbc_user -P 12345678 -Q "SELECT @@version GO"
shell: bash

# Temporary step
- name: Temporarily disable failing tests
if: always() && steps.run-pg_dump-restore.outcome == 'success' && inputs.is_final_ver == 'true'
env:
migr_mode: ${{ inputs.migration_mode }}
run: |
if [[ "$migr_mode" == "multi-db" ]];then
base_dir=${{ matrix.upgrade-path.path[0] }}
if [[ "$base_dir" == *"latest"* ]]; then
base_dir="latest"
fi
if [[ '${{ inputs.dump_format }}' == 'plain' ]];then
sudo PGPASSWORD=12345678 ~/${{ inputs.pg_new_dir }}/bin/psql -h localhost -d jdbc_testdb -U jdbc_user --single-transaction -f ~/upgrade/pg_dump.archive 2>> ~/upgrade/error.log
else
base_dir="singledb"
~/${{ inputs.pg_new_dir }}/bin/pg_restore -h localhost -d jdbc_testdb -U jdbc_user --single-transaction ~/upgrade/pg_dump.archive 2>> ~/upgrade/error.log
fi

if [[ '${{ inputs.logical_database }}' != 'null' ]];then
base_dir="${{ inputs.logical_database }}"
fi

# Temporarily disable certain tests until fixed
sed -i "/BABEL-3513/d" test/JDBC/upgrade/$base_dir/schedule
export PATH=/opt/mssql-tools/bin:$PATH
sqlcmd -S localhost -U jdbc_user -P 12345678 -Q "SELECT @@version GO"
shell: bash

- name: Run Verify Tests
Expand Down
8 changes: 6 additions & 2 deletions .github/composite-actions/setup-dump-restore-ca/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -45,12 +45,16 @@ runs:

logical_database_var=".\"dump-restore-version\"[${{ matrix.upgrade-path.id }}][$i].\"logical-database\""
logical_database=$(yq $logical_database_var ${{ github.workspace }}/.github/configuration/dump-restore-test-configuration.yml)
dump_data_as_var=".\"dump-restore-version\"[${{ matrix.upgrade-path.id }}][$i].\"dump-data-as\""
dump_data_as=$(yq $dump_data_as_var ${{ github.workspace }}/.github/configuration/dump-restore-test-configuration.yml)
dump_format_var=".\"dump-restore-version\"[${{ matrix.upgrade-path.id }}][$i].\"dump-format\""
dump_format=$(yq $dump_format_var ${{ github.workspace }}/.github/configuration/dump-restore-test-configuration.yml)

if [[ $logical_database == 'null' ]]
then
printf " - name: Dump and Restore to version $dump_restore_version\n id: dump-restore-version-$i\n if: always() $temp\n uses: ${uses_file}\n with: \n engine_branch: ${engine_branch}\n extension_branch: ${extension_branch}\n is_final_ver: ${is_final_ver}\n pg_old_dir: ${pg_old_dir}\n pg_new_dir: ${pg_new_dir}\n migration_mode: 'multi-db'\n\n" >> $dump_restore_version_dir_path/action.yml
printf " - name: Dump and Restore to version $dump_restore_version\n id: dump-restore-version-$i\n if: always() $temp\n uses: ${uses_file}\n with: \n engine_branch: ${engine_branch}\n extension_branch: ${extension_branch}\n is_final_ver: ${is_final_ver}\n pg_old_dir: ${pg_old_dir}\n pg_new_dir: ${pg_new_dir}\n migration_mode: 'multi-db'\n dump_data_as: ${dump_data_as}\n dump_format: ${dump_format}\n\n" >> $dump_restore_version_dir_path/action.yml
else
printf " - name: Dump and Restore to version $dump_restore_version\n id: dump-restore-version-$i\n if: always() $temp\n uses: ${uses_file}\n with: \n engine_branch: ${engine_branch}\n extension_branch: ${extension_branch}\n is_final_ver: ${is_final_ver}\n pg_old_dir: ${pg_old_dir}\n pg_new_dir: ${pg_new_dir}\n migration_mode: 'multi-db'\n logical_database: ${logical_database}\n\n" >> $dump_restore_version_dir_path/action.yml
printf " - name: Dump and Restore to version $dump_restore_version\n id: dump-restore-version-$i\n if: always() $temp\n uses: ${uses_file}\n with: \n engine_branch: ${engine_branch}\n extension_branch: ${extension_branch}\n is_final_ver: ${is_final_ver}\n pg_old_dir: ${pg_old_dir}\n pg_new_dir: ${pg_new_dir}\n migration_mode: 'multi-db'\n logical_database: ${logical_database}\n dump_data_as: ${dump_data_as}\n dump_format: ${dump_format}\n\n" >> $dump_restore_version_dir_path/action.yml
fi

previous_installed_version=$dump_restore_version
Expand Down
38 changes: 37 additions & 1 deletion .github/configuration/dump-restore-test-configuration.yml
Original file line number Diff line number Diff line change
@@ -1,20 +1,56 @@
dump-restore-version: [[
{
version: source.latest,
dump-data-as: copy,
dump-format: plain,
logical-database: null
},
{
version: target.latest,
dump-data-as: copy,
dump-format: plain,
logical-database: null
}
],
[
{
version: source.latest,
dump-data-as: copy,
dump-format: custom,
logical-database: master
},
{
version: target.latest,
dump-data-as: copy,
dump-format: custom,
logical-database: master
}
],
[
{
version: source.latest,
dump-data-as: inserts,
dump-format: tar,
logical-database: null
},
{
version: target.latest,
dump-data-as: inserts,
dump-format: tar,
logical-database: null
}
],
[
{
version: source.latest,
dump-data-as: inserts,
dump-format: directory,
logical-database: master
},
{
version: target.latest,
dump-data-as: inserts,
dump-format: directory,
logical-database: master
}
]]
10 changes: 8 additions & 2 deletions .github/workflows/pg_dump-restore-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,13 @@ jobs:
run: |
config="'$(yq -o=json ${{ github.workspace }}/.github/configuration/dump-restore-test-configuration.yml)'"
config=$(echo $config | sed "s/\"/\\\\\"/g")
DUMP_RESTORE_PATH_LIST=$(node -e "let k = JSON.parse($config); let p = k['dump-restore-version'].map((itm, index) => ({ id: index, path: itm.map(i => i.version.toString().replace(/[.]/g, \"_\")), title: (itm[itm.length - 1]['logical-database'] == null ? 'Instance-level-' : 'Database-level-') + itm.map(i => i.version.toString().replace(/[.]/g, \"_\")).join(\"-\"), last_version: itm[itm.length - 1].version.toString().replace(/[.]/g, \"_\") })); console.log(JSON.stringify(p));")
DUMP_RESTORE_PATH_LIST=$(node -e "let k = JSON.parse($config); \
let p = k['dump-restore-version'].map((itm, index) => ({ id: index, path: itm.map(i => i.version.toString().replace(/[.]/g, \"_\")), \
title: (itm[itm.length - 1]['logical-database'] == null ? 'Instance-level-' : 'Database-level-') + itm.map(i => i.version.toString().replace(/[.]/g, \"_\")).join(\"-\"), \
last_version: itm[itm.length - 1].version.toString().replace(/[.]/g, \"_\"), \
dump_method: itm[itm.length - 1]['dump-data-as'] == 'copy' ? 'COPY' : 'INSERTS', \
dump_format: itm[itm.length - 1]['dump-format']})); \
console.log(JSON.stringify(p));")
echo "::set-output name=dump-restore-path-list::$DUMP_RESTORE_PATH_LIST"

run-dump-restore-test:
Expand All @@ -24,7 +30,7 @@ jobs:
fail-fast: false
matrix:
upgrade-path: ${{ fromJson(needs.generate-dump-restore-tests.outputs.dump-restore-path-list) }}
name: Run Dump Restore Test for ${{ matrix.upgrade-path.title }}
name: Dump Restore Test using ${{ matrix.upgrade-path.dump_method }} for ${{ matrix.upgrade-path.title }} - format=${{ matrix.upgrade-path.dump_format }}
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v2
Expand Down
15 changes: 10 additions & 5 deletions contrib/babelfishpg_tds/test/t/004_bbfdumprestore.pl
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
my $dump1_file = "$tempdir/dump_all_old.sql";
my $dump2_file = "$tempdir/dump_db_old.sql";
my $dump3_file = "$tempdir/dump_all_new.sql";
my $dump4_file = "$tempdir/dump_db_new.sql";
my $dump4_file = "$tempdir/dump_db_new.custom";

############################################################################################
############################### Test for cross version mode ################################
Expand Down Expand Up @@ -92,6 +92,7 @@
'-d', 'testdb',
'-U', 'test_master',
'-p', $newnode->port,
'--single-transaction',
'-f', $dump1_file,
],
qr/Dump and restore across different Postgres versions is not yet supported./,
Expand All @@ -104,6 +105,7 @@
'-d', 'testdb',
'-U', 'test_master',
'-p', $newnode->port,
'--single-transaction',
'-f', $dump2_file,
],
qr/Dump and restore across different Postgres versions is not yet supported./,
Expand Down Expand Up @@ -136,11 +138,12 @@
'--port', $newnode2->port, '--globals-only', '--quote-all-identifiers',
'--verbose', '--no-role-passwords', '--file', $dump3_file);
$newnode2->command_ok(\@dumpall_command, 'Dump global objects.');
# Dump Babelfish database using pg_dump.
# Dump Babelfish database using pg_dump. Let's dump with the custom format
# this time so that we cover pg_restore as well.
@dump_command = (
'pg_dump', '--username', 'test_master', '--quote-all-identifiers',
'--port', $newnode2->port, '--verbose', '--dbname', 'testdb',
'--file', $dump4_file);
'--format', 'custom', '--file', $dump4_file);
$newnode2->command_ok(\@dump_command, 'Dump Babelfish database.');
$newnode2->stop;

Expand All @@ -155,6 +158,7 @@
'-d', 'testdb',
'-U', 'test_master',
'-p', $newnode->port,
'--single-transaction',
'-f', $dump3_file,
],
qr/Dump and restore across different migration modes is not yet supported./,
Expand All @@ -163,11 +167,12 @@
# Similarly, restore of dump file should also cause a failure.
$newnode->command_fails_like(
[
'psql',
'pg_restore',
'-d', 'testdb',
'-U', 'test_master',
'-p', $newnode->port,
'-f', $dump4_file,
'--single-transaction',
$dump4_file,
],
qr/Dump and restore across different migration modes is not yet supported./,
'Restore of Babelfish database failed since source and target migration modes do not match.');
Expand Down
31 changes: 25 additions & 6 deletions contrib/babelfishpg_tsql/src/hooks.c
Original file line number Diff line number Diff line change
Expand Up @@ -3827,18 +3827,37 @@ fill_missing_values_in_copyfrom(Relation rel, Datum *values, bool *nulls)
relid == namespace_ext_oid ||
relid == bbf_view_def_oid)
{
int16 dbid = 0;
AttrNumber attnum;

attnum = (AttrNumber) attnameAttNum(rel, "dbid", false);
Assert(attnum != InvalidAttrNumber);

if (!nulls[attnum - 1])
return;
if (nulls[attnum - 1])
{
int16 dbid = getDbidForLogicalDbRestore(relid);
values[attnum - 1] = Int16GetDatum(dbid);
nulls[attnum - 1] = false;
}
}

/*
* Populate owner column in babelfish_sysdatabases catalog table with
* current user.
*/
if (relid == sysdatabases_oid)
{
AttrNumber attnum;

attnum = (AttrNumber) attnameAttNum(rel, "owner", false);
Assert(attnum != InvalidAttrNumber);

dbid = getDbidForLogicalDbRestore(relid);
values[attnum - 1] = Int16GetDatum(dbid);
nulls[attnum - 1] = false;
if (nulls[attnum - 1])
{
const char *owner = GetUserNameFromId(GetSessionUserId(), false);

values[attnum - 1] = CStringGetDatum(owner);
nulls[attnum - 1] = false;
}
}
}

Expand Down
Loading