#coding:utf-8
#
# id: bugs.core_5833
# title: DDL triggers for some object types (views, exceptions, roles, indexes, domains) are lost in backup-restore process
# decription:
#   We create DDL triggers for all cases that are enumerated in $FB_HOME/doc/sql.extensions/README.ddl_triggers.txt.
#   Then query to RDB$TRIGGERS table is applied to database and its results are stored in <log_file_1>.
#   After this we do backup and restore to new file, again apply query to RDB$TRIGGERS and store results to <log_file_2>.
#   Finally we compare <log_file_1> and <log_file_2> but exclude from comparison lines which start with 'BLOB_ID'
#   (these are "prefixes" for RDB$TRIGGER_BLR and RDB$TRIGGER_SOURCE).
#   Difference should be empty.
#
#   Confirmed bug on WI-T4.0.0.977 and WI-V3.0.4.32972.
#   Works fine on:
#   30SS, build 3.0.4.32980: OK, 4.656s.
#   40SS, build 4.0.0.993: OK, 6.531s.
#
# tracker_id: CORE-5833
# min_versions: ['3.0']
# versions: 3.0
# qmid: None
|
|
|
|
import pytest
|
2021-12-09 19:26:42 +01:00
|
|
|
from difflib import unified_diff
|
|
|
|
from pathlib import Path
|
|
|
|
from firebird.qa import db_factory, python_act, Action, temp_file
|
2021-04-26 20:07:00 +02:00
|
|
|
|
|
# version: 3.0
# resources: None
# No output substitutions are needed: the test compares metadata snapshots itself.
substitutions_1 = []

# The database starts empty; all DDL is issued inside the test body.
init_script_1 = """"""
# Test database: SQL dialect 3, created empty (see init_script_1).
db_1 = db_factory(sql_dialect=3, init=init_script_1)
# test_script_1
#---
#
# import os
# import re
# import subprocess
# import time
# import difflib
#
# os.environ["ISC_USER"] = user_name
# os.environ["ISC_PASSWORD"] = user_password
# db_conn.close()
#
# #--------------------------------------------
#
# def flush_and_close( file_handle ):
#     # https://docs.python.org/2/library/os.html#os.fsync
#     # If you're starting with a Python file object f,
#     # first do f.flush(), and
#     # then do os.fsync(f.fileno()), to ensure that all internal buffers associated with f are written to disk.
#     global os
#
#     file_handle.flush()
#     if file_handle.mode not in ('r', 'rb') and file_handle.name != os.devnull:
#         # otherwise: "OSError: [Errno 9] Bad file descriptor"!
#         os.fsync(file_handle.fileno())
#     file_handle.close()
#
# #--------------------------------------------
#
# def cleanup( f_names_list ):
#     global os
#     for i in range(len( f_names_list )):
#         if type(f_names_list[i]) == file:
#             del_name = f_names_list[i].name
#         elif type(f_names_list[i]) == str:
#             del_name = f_names_list[i]
#         else:
#             print('Unrecognized type of element:', f_names_list[i], ' - can not be treated as file.')
#             del_name = None
#
#         if del_name and os.path.isfile( del_name ):
#             os.remove( del_name )
#
# #--------------------------------------------
#
# ddl_list = ''.join(
#     (
#         'CREATE TABLE,ALTER TABLE,DROP TABLE,CREATE PROCEDURE,ALTER PROCEDURE,DROP PROCEDURE'
#         , ',CREATE FUNCTION,ALTER FUNCTION,DROP FUNCTION,CREATE TRIGGER,ALTER TRIGGER,DROP TRIGGER'
#         , ',CREATE EXCEPTION,ALTER EXCEPTION,DROP EXCEPTION,CREATE VIEW,ALTER VIEW,DROP VIEW'
#         , ',CREATE DOMAIN,ALTER DOMAIN,DROP DOMAIN,CREATE ROLE,ALTER ROLE,DROP ROLE'
#         , ',CREATE SEQUENCE,ALTER SEQUENCE,DROP SEQUENCE,CREATE USER,ALTER USER,DROP USER'
#         , ',CREATE INDEX,ALTER INDEX,DROP INDEX,CREATE COLLATION,DROP COLLATION,ALTER CHARACTER SET'
#         , ',CREATE PACKAGE,ALTER PACKAGE,DROP PACKAGE,CREATE PACKAGE BODY,DROP PACKAGE BODY'
#     )
# )
# #ddl_list = 'CREATE TABLE,ALTER TABLE,DROP TABLE,CREATE PROCEDURE,ALTER PROCEDURE,DROP PROCEDURE'
# #ddl_list += ',CREATE FUNCTION,ALTER FUNCTION,DROP FUNCTION,CREATE TRIGGER,ALTER TRIGGER,DROP TRIGGER'
# #ddl_list += ',CREATE EXCEPTION,ALTER EXCEPTION,DROP EXCEPTION,CREATE VIEW,ALTER VIEW,DROP VIEW'
# #ddl_list += ',CREATE DOMAIN,ALTER DOMAIN,DROP DOMAIN,CREATE ROLE,ALTER ROLE,DROP ROLE'
# #ddl_list += ',CREATE SEQUENCE,ALTER SEQUENCE,DROP SEQUENCE,CREATE USER,ALTER USER,DROP USER'
# #ddl_list += ',CREATE INDEX,ALTER INDEX,DROP INDEX,CREATE COLLATION,DROP COLLATION,ALTER CHARACTER SET'
# #ddl_list += ',CREATE PACKAGE,ALTER PACKAGE,DROP PACKAGE,CREATE PACKAGE BODY,DROP PACKAGE BODY'
#
#
# # Initial DDL: create all triggers
# ##################################
# f_ddl_sql=open( os.path.join(context['temp_directory'],'tmp_ddl_triggers_5833.sql'), 'w')
# f_ddl_sql.write('set bail on;\\n')
# f_ddl_sql.write('set term ^;\\n')
#
# for i in ddl_list.split(','):
#     for k in (1,2):
#         evt_time='before' if k==1 else 'after'
#         f_ddl_sql.write( 'recreate trigger trg_' + evt_time + '_' + i.replace(' ','_').lower() + ' active '+evt_time+' ' + i.lower()+ ' as \\n' )
#         f_ddl_sql.write( ' declare c rdb$field_name;\\n' )
#         f_ddl_sql.write( 'begin\\n' )
#         f_ddl_sql.write( " c = rdb$get_context('DDL_TRIGGER', 'OBJECT_NAME');\\n" )
#         f_ddl_sql.write( 'end\\n^' )
#         f_ddl_sql.write( '\\n' )
#
#
# f_ddl_sql.write('set term ;^\\n')
# f_ddl_sql.write('commit;\\n')
# flush_and_close( f_ddl_sql )
#
# runProgram('isql', [dsn, '-i', f_ddl_sql.name] )
#
# # Prepare check query:
# ######################
# sql_text=''' set blob all;
#     set list on;
#     set count on;
#     select rdb$trigger_name, rdb$trigger_type, rdb$trigger_source as blob_id_for_trg_source, rdb$trigger_blr as blob_id_for_trg_blr
#     from rdb$triggers
#     where rdb$system_flag is distinct from 1
#     order by 1;
# '''
#
# f_chk_sql=open( os.path.join(context['temp_directory'],'tmp_check_trg_5833.sql'), 'w')
# f_chk_sql.write( sql_text )
# flush_and_close( f_chk_sql )
#
# # Query RDB$TRIGGERS before b/r:
# ################################
#
# f_xmeta1_log = open( os.path.join(context['temp_directory'],'tmp_xmeta1_5833.log'), 'w')
# f_xmeta1_err = open( os.path.join(context['temp_directory'],'tmp_xmeta1_5833.err'), 'w')
#
# # Add to log result of query to rdb$triggers table:
# subprocess.call( [context['isql_path'], dsn, "-i", f_chk_sql.name],
#                  stdout = f_xmeta1_log,
#                  stderr = f_xmeta1_err
#                )
#
# flush_and_close( f_xmeta1_log )
# flush_and_close( f_xmeta1_err )
#
# # Do backup and restore into temp file:
# #######################################
# tmp_bkup=os.path.join(context['temp_directory'],'tmp_backup_5833.fbk')
# tmp_rest=os.path.join(context['temp_directory'],'tmp_restored_5833.fdb')
# if os.path.isfile(tmp_rest):
#     os.remove(tmp_rest)
#
# runProgram('gbak', ['-b', dsn, tmp_bkup ] )
# runProgram('gbak', ['-c', tmp_bkup, 'localhost:'+tmp_rest ] )
#
#
# # Query RDB$TRIGGERS after b/r:
# ###############################
#
# f_xmeta2_log = open( os.path.join(context['temp_directory'],'tmp_xmeta2_5833.log'), 'w')
# f_xmeta2_err = open( os.path.join(context['temp_directory'],'tmp_xmeta2_5833.err'), 'w')
#
# subprocess.call( [context['isql_path'], 'localhost:'+tmp_rest, "-i", f_chk_sql.name],
#                  stdout = f_xmeta2_log,
#                  stderr = f_xmeta2_err
#                )
#
# flush_and_close( f_xmeta2_log )
# flush_and_close( f_xmeta2_err )
#
# # Every STDERR log should be EMPTY:
# ###################################
#
# f_list = ( f_xmeta1_err, f_xmeta2_err )
# for i in range(len(f_list)):
#     f_name=f_list[i].name
#     if os.path.getsize(f_name) > 0:
#         with open( f_name,'r') as f:
#             for line in f:
#                 print("Unexpected STDERR, file "+f_name+": "+line)
#
#
# # DIFFERENCE between f_xmeta1_log and f_xmeta2_log should be EMPTY:
# ####################
#
# old_rdb_triggers_data=open(f_xmeta1_log.name, 'r')
# new_rdb_triggers_data=open(f_xmeta2_log.name, 'r')
#
# # NB: we should EXCLUDE from comparison lines which relate to BLOB IDs for records:
# # ~~~~~~~~~~~~~~~~~~~~~
#
# difftext = ''.join(difflib.unified_diff(
#     [ line for line in old_rdb_triggers_data.readlines() if not line.startswith('BLOB_ID_FOR_TRG') ],
#     [ line for line in new_rdb_triggers_data.readlines() if not line.startswith('BLOB_ID_FOR_TRG') ]
# ))
# old_rdb_triggers_data.close()
# new_rdb_triggers_data.close()
#
# f_diff_txt=open( os.path.join(context['temp_directory'],'tmp_5833_metadata_diff.txt'), 'w')
# f_diff_txt.write(difftext)
# flush_and_close( f_diff_txt )
#
# with open( f_diff_txt.name,'r') as f:
#     for line in f:
#         print("Unexpected DIFF in metadata: "+line)
#
# # CLEANUP
# #########
# time.sleep(1)
# cleanup( (f_ddl_sql, f_chk_sql, f_xmeta1_log, f_xmeta1_err, f_xmeta2_log, f_xmeta2_err, f_diff_txt, tmp_bkup, tmp_rest) )
#
#---
# Test runner bound to db_1; drives isql/gbak against the test database.
act_1 = python_act('db_1', substitutions=substitutions_1)
# All DDL-trigger event types from $FB_HOME/doc/sql.extensions/README.ddl_triggers.txt.
# A BEFORE and an AFTER trigger is created for each one in test_1.
ddl_list = ['CREATE TABLE', 'ALTER TABLE', 'DROP TABLE',
            'CREATE PROCEDURE', 'ALTER PROCEDURE', 'DROP PROCEDURE',
            'CREATE FUNCTION', 'ALTER FUNCTION', 'DROP FUNCTION',
            'CREATE TRIGGER', 'ALTER TRIGGER', 'DROP TRIGGER',
            'CREATE EXCEPTION', 'ALTER EXCEPTION', 'DROP EXCEPTION',
            'CREATE VIEW', 'ALTER VIEW', 'DROP VIEW',
            'CREATE DOMAIN', 'ALTER DOMAIN', 'DROP DOMAIN',
            'CREATE ROLE', 'ALTER ROLE', 'DROP ROLE',
            'CREATE SEQUENCE', 'ALTER SEQUENCE', 'DROP SEQUENCE',
            'CREATE USER', 'ALTER USER', 'DROP USER',
            'CREATE INDEX', 'ALTER INDEX', 'DROP INDEX',
            'CREATE COLLATION', 'DROP COLLATION', 'ALTER CHARACTER SET',
            'CREATE PACKAGE', 'ALTER PACKAGE', 'DROP PACKAGE',
            'CREATE PACKAGE BODY', 'DROP PACKAGE BODY']
# Metadata snapshot query: list all non-system triggers with their type and the
# BLOB ids of source/BLR. The BLOB_ID_FOR_TRG* prefixes let the test filter out
# the BLOB id lines, which legitimately differ after restore.
test_script_1 = """
    set blob all;
    set list on;
    set count on;
    select rdb$trigger_name, rdb$trigger_type, rdb$trigger_source as blob_id_for_trg_source, rdb$trigger_blr as blob_id_for_trg_blr
    from rdb$triggers
    where rdb$system_flag is distinct from 1
    order by 1;
"""
# Temporary files for the backup (.fbk) and the restored database (.fdb).
fbk_file = temp_file('tmp_5833.fbk')
fdb_file = temp_file('tmp_5833.fdb')
@pytest.mark.version('>=3.0')
def test_1(act_1: Action, fbk_file: Path, fdb_file: Path):
    """Verify that DDL triggers survive a backup/restore cycle (CORE-5833).

    Steps:
      1. Create a BEFORE and an AFTER DDL trigger for every event in ddl_list.
      2. Snapshot RDB$TRIGGERS via test_script_1, dropping the BLOB-id lines
         (BLOB ids legitimately change on restore).
      3. gbak-backup the database and restore it into fdb_file.
      4. Take the same snapshot from the restored database.
      5. Assert the two snapshots are identical: no trigger may be lost.
    """
    # Initial DDL: create all triggers. 'set bail on' aborts isql on first error.
    script = ['set bail on;', 'set term ^;']
    for item in ddl_list:
        for evt_time in ['before', 'after']:
            script.append(f"recreate trigger trg_{evt_time}_{item.replace(' ', '_').lower()} active {evt_time} {item.lower()} as")
            script.append(" declare c rdb$field_name;")
            script.append("begin")
            script.append(" c = rdb$get_context('DDL_TRIGGER', 'OBJECT_NAME');")
            script.append("end ^")
            script.append("")
    script.append("set term ;^")
    script.append("commit;")
    act_1.isql(switches=[], input='\n'.join(script))
    # Query RDB$TRIGGERS before backup/restore; exclude volatile BLOB-id lines.
    act_1.reset()
    act_1.isql(switches=[], input=test_script_1)
    meta_before = [line for line in act_1.clean_stdout.splitlines() if not line.startswith('BLOB_ID_FOR_TRG')]
    # Backup, then restore into a fresh database file.
    act_1.reset()
    act_1.gbak(switches=['-b', act_1.db.dsn, str(fbk_file)])
    act_1.reset()
    act_1.gbak(switches=['-c', str(fbk_file), act_1.get_dsn(fdb_file)])
    # Query RDB$TRIGGERS after backup/restore (connect to the restored DB).
    act_1.reset()
    act_1.isql(switches=[act_1.get_dsn(fdb_file)], input=test_script_1, connect_db=False)
    meta_after = [line for line in act_1.clean_stdout.splitlines() if not line.startswith('BLOB_ID_FOR_TRG')]
    # Check: metadata snapshots must not differ.
    assert list(unified_diff(meta_before, meta_after)) == []