#coding:utf-8
#
# id: bugs.core_4855
# title: Online validation during DML activity in other connection leads to message "Error while trying to read from file" and "page in use during flush (210), file: cch.cpp line: 2672"
# decription:
# In order to check ticket issues this test does following:
# 1. Change on test database FW to OFF - this will increase DML performance.
# 2. Create two tables: one for inserting rows ('test') and second to serve as 'signal' to stop DML:
# inserts will be done until 2nd table (with name = 'stop') is empty.
# 3. Adds to SQL script DML (execute block) that will be used in ISQL session #1: it inserts rows
# into 'test' and checks after each inserting table 'stop' on presence there at least one row.
# This 'stop-row' will be inserted into 'stop' table in another ISQL session.
# 4. Launches ISQL connection #1 in separate (child) process. This ISQL will start 'heavy DML'.
# 5. Proceeds several online-validation actions by using synchronous call of 'FBSVCMGR action_validate'.
# Adds result of each validation to log.
# 6. Launches ISQL connection #2 in separate (child) process and give to this session trivial job:
# 'insert into stop(id) values(1); commit;'. This will cause ISQL session #1 to stop its activity
# because it runs in transaction with TIL = RC.
# 7. Outputs log of ISQL-1 and online validation results.
#
# Tested on WI-V3.0.0.32008, SS, SC and CS. Result: OK.
# Updated since WI-V3.0.0.32064: reduced key-length from extremely huge (1000) to "normal" value 36,
# otherwise get 'Statement failed, SQLSTATE = 54000 / Implementation limit exceeded / -Maximum index level reached'.
# (since CORE-4914 was fixed, see: http://sourceforge.net/p/firebird/code/62266 )
#
# tracker_id: CORE-4855
# min_versions: ['3.0']
# versions: 3.0
# qmid: None
import pytest
import subprocess
import time
from pathlib import Path
from firebird . qa import db_factory , python_act , Action , temp_file
# version: 3.0
# resources: None
# Substitutions applied to both actual and expected output before comparison:
# strip validation timestamps, normalize relation ids, and swallow the
# HY008/"operation was cancelled" noise that terminating ISQL may emit.
substitutions_1 = [('[0-9][0-9]:[0-9][0-9]:[0-9][0-9].[0-9][0-9]', ''),
                   ('Relation [0-9]{3,4}', 'Relation'),
                   ('Statement failed, SQLSTATE = HY008', ''),
                   ('operation was cancelled', ''), ('After line .*', '')]
# No initialization DDL is needed up-front: the test script creates its own
# objects ('test', 'stop', sequence 'g') at run time.
init_script_1 = """ """

# Test database: dialect 3, small page size (4096) as in the original ticket scenario.
db_1 = db_factory(page_size=4096, sql_dialect=3, init=init_script_1)
# test_script_1
#---
#
#
# import os
# import subprocess
# from subprocess import Popen
# #import signal
# import time
# from fdb import services
#
#
# os.environ["ISC_USER"] = user_name
# os.environ["ISC_PASSWORD"] = user_password
#
# # Obtain engine version:
# engine = str(db_conn.engine_version) # convert to text because 'float' object has no attribute 'startswith'
# dbname=db_conn.database_name
# db_conn.close()
#
# #--------------------------------------------
#
# def flush_and_close(file_handle):
# # https://docs.python.org/2/library/os.html#os.fsync
# # If you're starting with a Python file object f,
# # first do f.flush(), and
# # then do os.fsync(f.fileno()), to ensure that all internal buffers associated with f are written to disk.
# global os
#
# file_handle.flush()
# if file_handle.mode not in ('r', 'rb'):
# # otherwise: "OSError: [Errno 9] Bad file descriptor"!
# os.fsync(file_handle.fileno())
# file_handle.close()
#
# #--------------------------------------------
#
# def cleanup( f_names_list ):
# global os
# for i in range(len( f_names_list )):
# if type(f_names_list[i]) == file:
# del_name = f_names_list[i].name
# elif type(f_names_list[i]) == str:
# del_name = f_names_list[i]
# else:
# print('Unrecognized type of element:', f_names_list[i], ' - can not be treated as file.')
# del_name = None
#
# if del_name and os.path.isfile( del_name ):
# os.remove( del_name )
#
# #--------------------------------------------
#
# #####################################################################
# # Change database FW to OFF in order to increase speed of insertions and output its header info:
#
# fwoff_log=open( os.path.join(context['temp_directory'],'tmp_fw_off_4855.log'), 'w')
# subprocess.call([ context['fbsvcmgr_path'], "localhost:service_mgr",
# "action_properties",
# "prp_write_mode", "prp_wm_async",
# "dbname", dbname ],
# stdout=fwoff_log, stderr=subprocess.STDOUT)
#
# fwoff_log.seek(0,2)
# subprocess.call([ context['fbsvcmgr_path'], "localhost:service_mgr",
# "action_db_stats",
# "dbname", dbname, "sts_hdr_pages"],
# stdout=fwoff_log, stderr=subprocess.STDOUT)
# flush_and_close( fwoff_log )
#
# #####################################################################
# # Preparing script for ISQL that will do 'heavy DML':
#
# sql_cmd='''
# recreate sequence g;
# recreate table test(id int, s varchar( 36 ) unique using index test_s_unq);
# recreate table stop(id int);
# commit;
# set list on;
# set transaction read committed;
# set term ^;
# execute block returns( inserted_rows varchar(20) ) as
# begin
# while ( not exists(select * from stop) ) do
# begin
# insert into test(id, s) values( gen_id(g,1), rpad('', 36, uuid_to_char(gen_uuid())) );
# end
# inserted_rows = iif( gen_id(g,0) > 0, 'OK, LOT OF.', 'FAIL: ZERO!');
# suspend;
# end
# ^
# set term ;^
# commit;
# '''
#
# f_heavy_dml_cmd=open( os.path.join(context['temp_directory'],'tmp_isql_4855.sql'), 'w')
# f_heavy_dml_cmd.write(sql_cmd)
# flush_and_close( f_heavy_dml_cmd )
#
# #####################################################################
# # Starting ISQL in separate process with doing 'heavy DML' (bulk-inserts) until table 'stop'
# # remains empty (this table will get one row in separate ISQL session, see below p_stopper):
#
# f_heavy_dml_log=open( os.path.join(context['temp_directory'],'tmp_isql_4855.log'), 'w')
# p_heavy_dml = Popen([ context['isql_path'] , dsn, "-i", f_heavy_dml_cmd.name ], stdout=f_heavy_dml_log, stderr=subprocess.STDOUT)
#
# # Here we have to wait for sure that ISQL could establish its connect and starts DML
# # before we will run online-validation:
#
# # 16.03.2016: increased time delay because under some circumstances ISQL could not establish connect
# # and this lead validation to start verify table TEST (which was not expected).
# # Detected many times on CS/SC.
#
# time.sleep(4)
#
#
# #####################################################################
# # Doing online-validation.
# # Use subprocess.call() with waiting in main thread for it will finish.
#
# val_log=open( os.path.join(context['temp_directory'],'tmp_onval_4855.log'), 'w')
#
# val_log.write('Iteration #1:\\n')
# val_log.seek(0,2)
# subprocess.call( [ context['fbsvcmgr_path'], "localhost:service_mgr",
# "action_validate","val_lock_timeout","1",
# "dbname",dbname
# ],
# stdout=val_log, stderr=subprocess.STDOUT
# )
#
# time.sleep(2)
#
# # Iteration #2:
#
# val_log.seek(0,2)
# val_log.write('\\n\\nIteration #2:\\n')
# val_log.seek(0,2)
# subprocess.call([context['fbsvcmgr_path'], "localhost:service_mgr",
# "action_validate","val_lock_timeout","1",
# "dbname",dbname],
# stdout=val_log, stderr=subprocess.STDOUT)
# flush_and_close( val_log )
#
# #####################################################################
#
# # Stopping ISQL that is doing now 'heavy DML' (bulk-inserts):
#
# f_stopper_cmd=open( os.path.join(context['temp_directory'],'tmp_stop_4855.sql'), 'w')
# f_stopper_cmd.write('insert into stop(id) values(1); commit;')
# flush_and_close( f_stopper_cmd )
# p_stopper = subprocess.call([ context['isql_path'], dsn, "-i", f_stopper_cmd.name])
#
# # Stop working ISQL. NB: in rare cases this can lead to:
# # + Statement failed, SQLSTATE = HY008
# # + operation was cancelled
# # + After line ... in file .../tmp_isql_4855.sql
#
# p_heavy_dml.terminate()
# flush_and_close( f_heavy_dml_log )
#
# with open( f_heavy_dml_log.name,'r') as f:
# print(f.read())
#
# with open( val_log.name,'r') as f:
# print(f.read())
#
#
# #####################################################################
# # Cleanup:
# time.sleep(1)
# cleanup( (fwoff_log, val_log, f_heavy_dml_cmd, f_heavy_dml_log, f_stopper_cmd) )
#
#
#
#---
# Action bound to the test database; output is normalized via substitutions_1.
act_1 = python_act('db_1', substitutions=substitutions_1)
# Expected output of two online-validation passes while heavy DML is running,
# followed by the ISQL session's own report. Concrete timestamps and relation
# ids are stripped/normalized by substitutions_1 before comparison.
expected_stdout_1 = """
Iteration #1:
21:16:28.31 Validation started
21:16:28.31 Relation 128 (TEST)
21:16:29.31 Acquire relation lock failed
21:16:29.31 Relation 128 (TEST) : 1 ERRORS found
21:16:30.04 Relation 129 (STOP)
21:16:30.04 process pointer page 0 of 1
21:16:30.04 Relation 129 (STOP) is ok
21:16:30.04 Validation finished
Iteration #2:
21:16:32.46 Validation started
21:16:32.46 Relation 128 (TEST)
21:16:33.46 Acquire relation lock failed
21:16:33.46 Relation 128 (TEST) : 1 ERRORS found
21:16:35.09 Relation 129 (STOP)
21:16:35.09 process pointer page 0 of 1
21:16:35.09 Relation 129 (STOP) is ok
21:16:35.09 Validation finished
INSERTED_ROWS                   OK, LOT OF.
"""
# Temp files for the 'heavy DML' ISQL session: the SQL script it executes and
# the file that captures its stdout/stderr.
heavy_script_1 = temp_file('heavy_script.sql')
heavy_output_1 = temp_file('heavy_script.out')
@pytest.mark.version ( ' >=3.0 ' )
def test_1 ( act_1 : Action , heavy_script_1 : Path , heavy_output_1 : Path , capsys ) :
# Change database FW to OFF in order to increase speed of insertions and output its header info
2021-12-09 19:26:42 +01:00
act_1 . db . set_async_write ( )
2021-11-26 19:20:43 +01:00
# Preparing script for ISQL that will do 'heavy DML'
heavy_script_1 . write_text ( """
recreate sequence g ;
recreate table test ( id int , s varchar ( 36 ) unique using index test_s_unq ) ;
recreate table stop ( id int ) ;
commit ;
set list on ;
set transaction read committed ;
set term ^ ;
execute block returns ( inserted_rows varchar ( 20 ) ) as
begin
while ( not exists ( select * from stop ) ) do
begin
insert into test ( id , s ) values ( gen_id ( g , 1 ) , rpad ( ' ' , 36 , uuid_to_char ( gen_uuid ( ) ) ) ) ;
end
inserted_rows = iif ( gen_id ( g , 0 ) > 0 , ' OK, LOT OF. ' , ' FAIL: ZERO! ' ) ;
suspend ;
end
^
set term ; ^
commit ;
""" )
with open ( heavy_output_1 , mode = ' w ' ) as heavy_out :
p_heavy_sql = subprocess . Popen ( [ act_1 . vars [ ' isql ' ] , ' -i ' , str ( heavy_script_1 ) ,
' -user ' , act_1 . db . user ,
' -password ' , act_1 . db . password , act_1 . db . dsn ] ,
stdout = heavy_out , stderr = subprocess . STDOUT )
try :
time . sleep ( 4 )
# Run validation twice
with act_1 . connect_server ( ) as srv :
print ( ' Iteration #1: ' )
2021-12-07 13:36:20 +01:00
srv . database . validate ( database = act_1 . db . db_path , lock_timeout = 1 ,
2021-11-26 19:20:43 +01:00
callback = print )
print ( ' Iteration #2: ' )
2021-12-07 13:36:20 +01:00
srv . database . validate ( database = act_1 . db . db_path , lock_timeout = 1 ,
2021-11-26 19:20:43 +01:00
callback = print )
# Stopping ISQL that is doing now 'heavy DML' (bulk-inserts):
act_1 . isql ( switches = [ ] , input = ' insert into stop(id) values(1); commit; ' )
finally :
p_heavy_sql . terminate ( )
#
print ( heavy_output_1 . read_text ( ) )
# Check
act_1 . reset ( )
act_1 . expected_stdout = expected_stdout_1
act_1 . stdout = capsys . readouterr ( ) . out
assert act_1 . clean_stdout == act_1 . clean_expected_stdout
2021-04-26 20:07:00 +02:00