#coding:utf-8
"""
ID: issue-8263
ISSUE: https://github.com/FirebirdSQL/firebird/issues/8263
TITLE: gbak on Classic with ParallelWorkers > 1 doesn't restore indices, giving a cryptic error message
DESCRIPTION:
The following conditions must be met to reproduce the ticket issue:
* firebird.conf contains ParallelWorkers > 1 and MaxParallelWorkers >= ParallelWorkers;
* the test database has a table with an indexed column and more than one POINTER PAGE.
The test creates a table with an integer identity column 'id' and a wide text column ('data_filler') of length <DATA_FILLER_WID>.
We add <ROWS_COUNT> records to this table and then make a backup of the database.
Setting ROWS_COUNT = 50'000 and DATA_FILLER_WID = 1'000 leads to allocating 5 pointer pages for the test table
(the database must be created with page_size = 8192): only a handful of ~1 KB rows fit on each 8 KiB data page,
so the table occupies thousands of data pages, more than a single pointer page can reference.
Then we run restore WITHOUT the '-par' switch and without verbose output.
Before the fix this restore issued the error described in the ticket and the index remained inactive.
After the fix the restore must complete silently (without any output).
Finally, we run a query that must use the TEST_ID index and compare its explained plan with the expected one,
which must contain an 'Index "..." Full Scan' line.
NOTES:
[28.09.2024] pzotov
::: NB :::
This test forced a change to the firebird.conf prototypes for 5.x and 6.x, see in $QA_HOME/firebird-qa/configs/
the files 'fb50_all.conf' and 'fb60_all.conf': they now contain ParallelWorkers > 1.
This change may affect the result of the entire QA run! Some other tests may need to be adjusted after this.
Thanks to Vlad for suggestions about this test implementation.
Confirmed bug on 6.0.0.471, 5.0.2.1516
Checked on 6.0.0.474, 5.0.2.1519 -- all Ok.
"""
import locale
from pathlib import Path
import time
import pytest
from firebird.qa import *
from firebird.driver import driver_config, connect, DatabaseError, SrvRestoreFlag
tmp_fbk = temp_file('tmp_gh_8263.fbk')
tmp_fdb = temp_file('tmp_gh_8263_restored.tmp')
#########################
### S E T T I N G S ###
#########################
ROWS_COUNT = 50000
DATA_FILLER_WID = 1000
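
# With rows of roughly 1 KB, ROWS_COUNT such records occupy thousands of 8 KiB data pages and force
# the engine to allocate more than one pointer page for TEST (see the test description above).
# The helper below is only an illustrative sketch: it is NOT called by this test and is not part of
# the original implementation. It shows one way the "more than one pointer page" precondition could
# be verified manually, assuming RDB$PAGES.RDB$PAGE_TYPE = 4 denotes pointer pages.
def count_pointer_pages(con, table_name: str) -> int:
    # Count pointer pages registered in RDB$PAGES for the given table.
    sql = """
        select count(*)
        from rdb$pages p
        join rdb$relations r on r.rdb$relation_id = p.rdb$relation_id
        where r.rdb$relation_name = ? and p.rdb$page_type = 4
    """
    cur = con.cursor()
    cur.execute(sql, (table_name.upper(),))
    return cur.fetchone()[0]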
init_script = f"""
create table test(
id int generated by default as identity -- constraint pk_wares primary key using index test_pk
,data_filler varchar({DATA_FILLER_WID})
);
commit;
set term ^;
execute block as
declare n int = {ROWS_COUNT};
begin
while (n>0) do
begin
insert into test(data_filler) values( lpad('', {DATA_FILLER_WID}, uuid_to_char(gen_uuid())) );
n = n - 1;
end
end
^
set term ;^
commit;
create index test_id on test(id);
commit;
"""
db = db_factory(init = init_script, page_size = 8192)
act = python_act('db')
#-----------------------------------------------------------
def replace_leading(source, char="."):
    stripped = source.lstrip()
    return char * (len(source) - len(stripped)) + stripped
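# For example (illustrative values, not produced by this test): replace_leading('    -> Index "X" Full Scan')
# returns '....-> Index "X" Full Scan', i.e. the leading spaces of each explained-plan line are turned
# into dots so that plan indentation remains visible when the captured output is compared with expected_stdout.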
#-----------------------------------------------------------
@pytest.mark.version('>=5.0')
def test_1(act: Action, tmp_fbk: Path, tmp_fdb: Path, capsys):

    if act.vars['server-arch'].lower() != 'classic':
        pytest.skip('Only Classic was affected.')

    #srv_cfg = driver_config.register_server(name = f'srv_cfg_8263', config = '')
    #db_cfg_name = f'db_cfg_8263'
    #db_cfg_object = driver_config.register_database(name = db_cfg_name)
    #db_cfg_object.server.value = srv_cfg.name
    #db_cfg_object.database.value = str(act.db.db_path)
    ## db_cfg_object.parallel_workers.value = 3

    with act.db.connect() as con: # connect(db_cfg_name, user = act.db.user, password = act.db.password) as con:
        # Check that the effective configuration meets the preconditions described in the docstring:
        chk_sql = """
            select
                 max(iif(rdb$config_name = 'ServerMode', rdb$config_value, null)) as srv_mode
                ,cast(max(iif(rdb$config_name = 'ParallelWorkers', rdb$config_value, null)) as int) as par_workers
                ,cast(max(iif(rdb$config_name = 'MaxParallelWorkers', rdb$config_value, null)) as int) as max_workers
            from rdb$config
        """
        cur = con.cursor()
        cur.execute(chk_sql)
        srv_mode, par_workers, max_workers = cur.fetchone()
        assert srv_mode == 'Classic', f'Not applicable ServerMode: {srv_mode}'
        assert par_workers > 1, f'ParallelWorkers = {par_workers} must be greater than 1 for this test'
        assert max_workers >= par_workers, f'MaxParallelWorkers = {max_workers} must be not less than ParallelWorkers = {par_workers}'
    act.gfix(switches=['-shutdown', 'single', '-force', '0', act.db.dsn])
    print(act.stdout) # must be empty

    act.gbak(switches=['-b', act.db.dsn, str(tmp_fbk)], combine_output = True, io_enc = locale.getpreferredencoding())
    print(act.stdout) # must be empty

    # Before the fix, the following restore failed with:
    # gbak:cannot commit index TEST_ID
    # gbak: ERROR:invalid database handle (no active connection)
    # gbak: ERROR:Database is not online due to failure to activate one or more indices.
    # gbak: ERROR: Run gfix -online to bring database online without active indices.
    #
    act.gbak(switches=['-rep', str(tmp_fbk), str(tmp_fdb)], combine_output = True, io_enc = locale.getpreferredencoding())
    print(act.stdout) # must be empty!
    ###############################################################

    with connect(str(tmp_fdb), user = act.db.user, password = act.db.password) as con:
        chk_sql = 'select 1 from test order by id'
        cur = con.cursor()
        ps = cur.prepare(chk_sql)
        # Print the explained plan, padding each line with dots in order to preserve indentation:
        print( '\n'.join([replace_leading(s) for s in ps.detailed_plan.split('\n')]) )

    act.expected_stdout = f"""
        Select Expression
        ....-> Table "TEST" Access By ID
        ........-> Index "TEST_ID" Full Scan
    """
    act.stdout = capsys.readouterr().out
    assert act.clean_stdout == act.clean_expected_stdout