1+ import re
2+ import uuid
3+
14import pyodbc # adapter for SQL Server
25import psycopg2 # adapter for PostgreSQL
36import configparser # used to read settings from file
47import datetime # used to properly format dates and datetimes
58import time # used to calculate time taken
69
# This script was created with global variables without initializing them here. Ideally, we should store a settings
# object rather than all of them separately but this works.
settings = None  # configparser sections loaded by get_settings_from_file(); read e.g. as settings["NewDB"]
EXCLUDED_COLUMNS = ["RowID"]  # columns that are never copied into the new database
delete_data = False
historical = False  # when True, rows past their ValidityTo date are migrated as well
demo_fix = False
migration_modules = []
old_connection = None  # pyodbc connection to the legacy SQL Server DB (set in connect())
new_connection = None  # psycopg2 connection to the new PostgreSQL DB (set in connect())
# Reference timestamps used for the historical-data validity checks.
today = datetime.date.today()
now = datetime.datetime.now()
22+
723
824# loads connection configuration and migration settings from a file.
925# In future the settings file could be specified with a parameter.
@@ -29,16 +45,20 @@ def get_settings_from_file():
2945 print (" Histrical data will be migrated to the new database." )
3046 else :
3147 print (" No historical data will be migrated." )
32- global today
33- global now
34- today = datetime .date .today ()
35- now = datetime .datetime .now ()
3648 except KeyError as e :
3749 print ("\x1b [0;31;48m" + "Error while trying to load settings. " + \
3850 "Please make sure the settings.ini file exists in your working directory." + "\x1b [0m" )
3951 exit (1 )
4052
4153
def is_uuid(value):
    """Return True if *value* is a string that parses as a UUID, else False.

    Used to detect the uppercase UUIDs produced by the old .NET webapp so
    they can be lowercased before insertion into PostgreSQL.

    NOTE: passing version=4 to uuid.UUID does NOT validate that the value
    is a version-4 UUID — it overrides the version bits instead. Any valid
    32-hex-digit UUID string is accepted here, which is what we want for
    the lowercasing use case.
    """
    # Guard against non-string input: uuid.UUID raises TypeError/AttributeError
    # (not ValueError) for None or numbers, which the except below would miss.
    if not isinstance(value, str):
        return False
    try:
        uuid.UUID(value, version=4)
        return True
    except ValueError:
        # Not a parseable UUID string.
        return False
60+
61+
4262# tries to connect to both databases
4363def connect ():
4464 print ("Setting up connection to the databases:" )
@@ -50,8 +70,9 @@ def connect():
5070 global old_connection
5171 old_connection = pyodbc .connect (old_connection_string )
5272 except pyodbc .InterfaceError as exc :
53- print ("\x1b [0;31;48m" +
54- "ERROR: Could not connect to the SQL Server database. Make sure the server is running and check your settings." +
73+ print ("\x1b [0;31;48m"
74+ "ERROR: Could not connect to the SQL Server database. "
75+ "Make sure the server is running and check your settings."
5576 "\x1b [0m" )
5677 print (exc )
5778 exit (1 )
@@ -61,13 +82,15 @@ def connect():
6182 new_db = settings ["NewDB" ]
6283 new_connection_string = f'host={ new_db ["host" ]} port={ new_db ["port" ]} dbname={ new_db ["name" ]} ' \
6384 f'user={ new_db ["user" ]} password={ new_db ["pwd" ]} '
64- new_connection_string = f'postgres://{ new_db ["user" ]} @{ new_db ["host" ]} :{ new_db ["port" ]} /{ new_db ["name" ]} '
85+ # new_connection_string = \
86+ # f'postgres://{new_db["user"]}:{new_db["pwd"]}@{new_db["host"]}:{new_db["port"]}/{new_db["name"]}'
6587 try :
6688 global new_connection
6789 new_connection = psycopg2 .connect (new_connection_string )
6890 except psycopg2 .OperationalError as exc :
69- print ("\x1b [0;31;48m" +
70- "ERROR: Could not connect to the PostgreSQL database. Make sure the server is running and check your settings." +
91+ print ("\x1b [0;31;48m"
92+ "ERROR: Could not connect to the PostgreSQL database. "
93+ "Make sure the server is running and check your settings."
7194 "\x1b [0m" )
7295 print (exc )
7396 exit (1 )
@@ -108,43 +131,38 @@ def get_db_tables():
108131 print ("Finding tables in both databases.\n " )
109132 old_cursor .execute ("SELECT TABLE_NAME FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE = 'BASE TABLE';" )
110133 new_cursor .execute ("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public';" )
111- old_tables = list ()
112- for x in old_cursor :
113- # Remove special characters at the start and end of each item when adding it to the list.
114- # This way the entries in the old and new list match
115- old_tables .append (str (x )[2 :- 4 ])
116- new_tables = list ()
117- for x in new_cursor :
118- # Remove special characters at the start and end of each item when adding it to the list.
119- # This way the entries in the old and new list match
120- new_tables .append (str (x )[2 :- 3 ])
134+ old_tables = [x [0 ] for x in old_cursor ]
135+ new_tables = [x [0 ] for x in new_cursor ]
121136 return old_tables , new_tables
122137
123138
124139# This function puts the data from a SELECT statement into string and formats it correctly so that postgres can work
125140# with it.
126141def generate_insertion_string (row ):
127- row_str = "("
142+ row_list = []
128143 for x in row :
129144 # Strings must be enclosed in apostrophes, also escape single quotes in a string by doubling them
130145 if isinstance (x , str ):
131- row_str = row_str + "'" + str (x ).replace ("'" , "''" ) + "', "
146+ # The .NET webapp used to create uppercase UUIDs, so we try to detect it and lowercase it
147+ if 32 <= len (x ) <= 36 and is_uuid (x ):
148+ x = x .lower ()
149+ row_list .append ("'" + str (x ).replace ("'" , "''" ) + "'" )
132150 # Dates and datetimes must be enclosed in apostrophes
133151 elif isinstance (x , datetime .datetime ) or isinstance (x , datetime .date ):
134- row_str = row_str + "'" + str (x ) + "', "
135- # If x is NoneType then str(x) get transtlated to "None", but sql wants "null"
152+ row_list . append ( "'" + str (x ) + "'" )
153+ # If x is NoneType then str(x) get translated to "None", but sql wants "null"
136154 elif x is None :
137- row_str = row_str + "null, "
155+ row_list . append ( "null" )
138156 # If x is bytes we need to make them nice (start with \x and append the data converted to hex):
139157 elif isinstance (x , bytes ):
140- row_str = row_str + "'\\ x" + str (x .hex ()) + "', "
158+ row_list . append ( "'\\ x" + str (x .hex ()) + "'" )
141159 else :
142- row_str = row_str + str (x ) + ", "
143- row_str = row_str [: - 2 ] + " )"
160+ row_list . append ( str (x ))
161+ row_str = f"( { ', ' . join ( row_list ) } )"
144162 return row_str
145163
146164
147- # When not migrating historical data, this function figures out what colums "ValidityTo" is so we can later check for
165+ # When not migrating historical data, this function figures out what columns "ValidityTo" is so we can later check for
148166# each row if it is still valid or already historical
149167def get_validity_index (rows ):
150168 vi = - 1
@@ -164,6 +182,8 @@ def get_validity_index(rows):
164182
165183
166184def get_validity (vi , row ):
185+ global today
186+ global now
167187 if historical or ((not historical ) and vi == - 1 ):
168188 return True
169189 elif (not historical ) and vi != - 1 :
@@ -185,6 +205,17 @@ def get_validity(vi, row):
185205 return True
186206
187207
def extract_sequence_name(column_default):
    """Pull the sequence name out of a PostgreSQL column default.

    A serial/identity column's default looks like
    ``nextval('some_sequence'::regclass)``; this returns
    ``some_sequence``. Returns None for empty defaults or defaults
    that are not sequence-backed.
    """
    # Empty string or None: no default at all, nothing to extract.
    if not column_default:
        return None
    # Capture everything between nextval(' and the closing quote.
    match = re.search(r"nextval\('([^']*)", column_default)
    return match.group(1) if match else None
217+
218+
188219def migrate ():
189220 # This list collects all db tables that exist only in one of the databases but not the other.
190221 lonely_tables = list ()
@@ -206,26 +237,36 @@ def migrate():
206237 "\" FeedbackUUID\" , \" AuditUserID\" ) VALUES ('2000 01 01 00:00:00.000000', 0, 0, 0);" )
207238
208239 # Set up all the columns we're going to migrate.
209- new_cursor .execute ("SELECT COLUMN_NAME FROM information_schema.COLUMNS WHERE TABLE_NAME = '" + table + "';" )
240+ cursor = old_cursor .execute ("SELECT TOP 1 * FROM " + table + ";" )
241+ old_columns_with_types = {column [0 ].lower (): column [1 ] for column in cursor .description }
242+
243+ new_cursor .execute ("SELECT COLUMN_NAME, COLUMN_DEFAULT "
244+ "FROM information_schema.COLUMNS WHERE TABLE_NAME = '" + table + "';" )
210245 rows = new_cursor .fetchall ()
211246 # While we have the data ready: find out where dates are stored for historical data stuff. validity_index
212247 # stores in which column the date (ValidityTo) is stored
213248 validity_index = - 1
214249 if not historical :
215250 validity_index = get_validity_index (rows )
216251 # Finally, set up the columns to migrate
217- old_cols = ""
218- new_cols = "("
252+ sequence_columns = {}
253+ old_cols_list = []
254+ new_cols_list = []
219255 for row in rows :
220- old_cols = old_cols + str (row )[2 :- 3 ] + ", "
221- new_cols = new_cols + "\" " + str (row )[2 :- 3 ] + "\" , "
222- old_cols = old_cols [:- 2 ]
223- new_cols = new_cols [:- 2 ] + ")"
256+ if row [0 ] not in EXCLUDED_COLUMNS and row [0 ].lower () in old_columns_with_types :
257+ col_default = extract_sequence_name (row [1 ])
258+ if col_default :
259+ sequence_columns [row [0 ]] = col_default
260+ old_cols_list .append (row [0 ])
261+ new_cols_list .append (f'"{ row [0 ]} "' )
262+ old_cols = ", " .join (old_cols_list )
263+ new_cols = "(" + ", " .join (new_cols_list ) + ")"
224264
225265 # Get the data from the old db with these column specifications
226266 print (" Fetching data from old database." )
227267 old_cursor .execute ("SELECT COUNT(*) FROM " + table + ";" )
228- print (" Found " + str (old_cursor .fetchone ())[1 :- 3 ] + " entries." )
268+ print (f" Found { old_cursor .fetchone ()[0 ]} entries." )
269+ print (f" == old_cols: { old_cols } from { table } ==" )
229270 old_cursor .execute ("SELECT " + old_cols + " FROM " + table + ";" )
230271
231272 # Set up the values for the insert statement and execute
@@ -247,14 +288,19 @@ def migrate():
247288 # Not rolling back leads to an InFailedSqlTransaction exception.
248289 new_connection .rollback ()
249290 pass
250-
291+ except Exception as e :
292+ print ("Failed: INSERT INTO \" " + table + "\" " + new_cols + " VALUES " + row_str + ";" )
293+ raise
294+ if sequence_columns :
295+ print (" Data transferred, updating sequences." )
296+ for column , sequence in sequence_columns .items ():
297+ new_cursor .execute (f"select setval('{ sequence } ', max(\" { column } \" )) from \" { table } \" ;" )
251298 print (" Table " + table + " has been migrated.\n " )
252299
253300 # Table doesn't exist
254301 else :
255- print ("\x1b [0;31;48m" + "WARNING: Table " + table + \
256- " only exists in one of the databases (but not the other)! Is this correct?" + "\x1b [0m\n " )
257- print ("" )
302+ print (f"\x1b [0;31;48mWARNING: Table { table } only exists in one of the databases "
303+ f"new: { table in new_tables } , old:{ table in old_tables } )! Is this correct?\x1b [0m\n " )
258304 lonely_tables .append (table )
259305
260306 # Print all tables that have not been migrated due to missing schemas:
0 commit comments