3 # Tool for upgrading/converting a db
5 # 1) Database Schema - schema for the new database you want to upgrade to
6 # 2) Config File - the config file that describes how to convert the db
9 # 1) Will attempt to convert the db defined in /etc/planetlab/plc_config
10 # 2) Does not automatically drop archived database. They must be removed
# --- Script setup: defaults, option parsing, upgrade-config load, db connect ---
# NOTE(review): this listing is gappy (the fused original line numbers jump);
# imports, `config = {}`, usage()'s `def` line, the try: headers and the
# sys.exit calls are not visible here.

# Default locations: live PLC configuration, the 3->4 conversion directives,
# and the target (new) schema file.
19 config_file = "/etc/planetlab/plc_config"
# execfile() populates the `config` dict with the PLC_DB_* settings (Python 2).
20 execfile(config_file, config)
21 upgrade_config_file = "plcdb.3-4.conf"
22 schema_file = "planetlab4.sql"

# Body of usage() -- its `def` line is outside this listing.
27 print "Usage: %s [OPTION] UPGRADE_CONFIG_FILE " % sys.argv[0]
29 print " -s, --schema=FILE Upgraded Database Schema"
30 print " -t, --temp-dir=DIR Temp Directory"
31 print " --help This message"

# Command-line parsing (inside a try: whose header is elided).
35 (opts, argv) = getopt.getopt(sys.argv[1:],
40 except getopt.GetoptError, err:
41 print "Error: ", err.msg
# NOTE(review): the usage text advertises "-t" for the temp dir but the code
# below matches "-d" -- confirm which short option is intended.
44 for (opt, optval) in opts:
45 if opt == "-s" or opt == "--schema":
47 elif opt == "-d" or opt == "--temp-dir":
# The single positional argument is the upgrade config file.
52 upgrade_config_file = argv[0]
54 print "Error: too few arguments"

# Ordered list of CREATE'd schema objects, filled by the parser further down.
59 schema_items_ordered = []

64 # load conf file for this upgrade
# The upgrade config is plain Python; executing it defines the per-table
# column mappings plus DB_VERSION_PREVIOUS / DB_VERSION_NEW.
67 execfile(upgrade_config_file, upgrade_config)
68 upgrade_config.pop('__builtins__')
69 db_version_previous = upgrade_config['DB_VERSION_PREVIOUS']
70 db_version_new = upgrade_config['DB_VERSION_NEW']
72 except IOError, fault:
73 print "Error: upgrade config file (%s) not found. Exiting" % \
76 except KeyError, fault:
# NOTE(review): "confing" is a typo in the runtime message below (left as-is
# here since it is a program string, not a comment).
77 print "Error: %s not set in upgrade confing (%s). Exiting" % \
78 (fault, upgrade_config_file)

# Connect to the current (old) database as the PLC user.
85 db = pgdb.connect(user = config['PLC_DB_USER'],
86 database = config['PLC_DB_NAME'])
89 def archive_db(database, archived_database):
91 archive_db = " dropdb -U postgres %s > /dev/null 2>&1;" \
92 " psql template1 postgres -qc " \
93 " 'ALTER DATABASE %s RENAME TO %s;';" % \
94 (archived_database, database, archived_database)
95 exit_status = os.system(archive_db)
97 print "Error: unable to archive database. Upgrade failed"
99 #print "Status: %s has been archived. now named %s" % (database, archived_database)
102 def encode_utf8(inputfile_name, outputfile_name):
# Re-encode a latin-1 SQL dump file as utf8, dropping "SET CLIENT_ENCODING"
# statements so the freshly created UTF8 database's setting wins on reload.
# NOTE(review): original lines 104, 109, 111-113 and 115 (try:, the
# skip/continue for matched lines, the file closes and the error exit) are
# not visible in this listing.
103 # rewrite an iso-8859-1 encoded file in utf8
105 inputfile = open(inputfile_name, 'r')
106 outputfile = open(outputfile_name, 'w')
107 for line in inputfile:
# Case-insensitive match; matched lines are presumably skipped (the branch
# body is elided here -- confirm against the full source).
108 if line.upper().find('SET CLIENT_ENCODING') > -1:
# Python 2: decode as latin-1, re-encode as utf8 bytes.
110 outputfile.write(unicode(line, 'iso-8859-1').encode('utf8'))
114 print 'error encoding file'
117 def create_item_from_schema(item_name):
# Create one parsed schema object (table/view/index/aggregate) in the new
# database by piping its CREATE statement through psql.
# NOTE(review): lines 118-119, 123-124, 127-130 and 132-133 (try:, the
# exit_status check, sys.exit calls and the except headers) are elided.
# `schema` and `config` are module-level globals.
120 (type, body_list) = schema[item_name]
# body_list holds the raw source lines collected by the schema parser; join
# them back into a single statement for psql -qc.
121 exit_status = os.system('psql %s %s -qc "%s" > /dev/null 2>&1' % \
122 (config['PLC_DB_NAME'], config['PLC_DB_USER'],"".join(body_list) ) )
125 except Exception, fault:
126 print 'Error: create %s failed. Check schema.' % item_name
# Reached when item_name has no entry in `schema` (the KeyError path).
131 print "Error: cannot create %s. definition not found in %s" % \
def fix_row(row, table_name, table_fields):
    """Apply per-table data fixups to a single row (a mutable list).

    - nodenetworks: convert a textual bwlimit ('10mbit', '64kbit') to an
      int in bits per second; already-int values are left alone.
    - slice_attribute: remap invalid attribute_type_id foreign keys.
    - slice_attribute_types: drop the rows for the remapped ids by
      returning None (the caller skips None rows).

    Returns the (possibly modified) row, or None when the row must not be
    written out.
    """
    if table_name in ['nodenetworks']:
        # convert str bwlimit to bps int
        bwlimit_index = table_fields.index('bwlimit')
        bwlimit = row[bwlimit_index]
        if isinstance(bwlimit, int):
            pass  # already numeric, nothing to convert
        elif 'mbit' in bwlimit:
            row[bwlimit_index] = int(bwlimit.split('mbit')[0]) * 1000000
        elif 'kbit' in bwlimit:
            row[bwlimit_index] = int(bwlimit.split('kbit')[0]) * 1000
    elif table_name in ['slice_attribute']:
        # modify some invalid foreign keys
        attribute_type_index = table_fields.index('attribute_type_id')
        if row[attribute_type_index] == 10004:
            row[attribute_type_index] = 10016
        elif row[attribute_type_index] == 10006:
            row[attribute_type_index] = 10017
    elif table_name in ['slice_attribute_types']:
        # The two type ids remapped above become duplicates here; drop
        # their definition rows entirely.
        # NOTE(review): original lines 158-160 are elided -- the None
        # return is reconstructed from generate_temp_table's "do not
        # attempt to write null rows" filter; confirm against full source.
        type_id_index = table_fields.index('attribute_type_id')
        if row[type_id_index] in [10004, 10006]:
            return None
    return row
def fix_table(table, table_name, table_fields):
    """Apply per-table fixups to a whole result set (a list of rows).

    For slice_attribute_types, removes the rows whose primary key is one
    of the duplicate/redundant ids (10004, 10006).  The list is updated
    in place (via slice assignment) and also returned, so both calling
    styles keep working.
    """
    if table_name in ['slice_attribute_types']:
        # remove duplicate/redundant primary keys.  Filtering into a new
        # list and slice-assigning avoids the remove-while-iterating bug
        # (list.remove() inside `for row in table` skips the element that
        # follows each removal).
        type_id_index = table_fields.index('attribute_type_id')
        table[:] = [row for row in table
                    if row[type_id_index] not in (10004, 10006)]
    return table
170 def remove_temp_tables():
# Delete every temp data file recorded in the module-level `temp_tables`
# dict (table_name -> temp file path), which generate_temp_table fills in.
# NOTE(review): original lines 171-172 and 175-177 are elided -- the loop
# is presumably wrapped in a try/except; confirm against the full source.
173 for temp_table in temp_tables:
174 os.remove(temp_tables[temp_table])
178 def generate_temp_table(table_name, db):
# Extract table_name's data from the OLD database (driven by the column
# mapping in upgrade_config) and write it as tab-separated text suitable
# for "COPY ... FROM stdin", recording the file path in temp_tables.
# NOTE(review): this listing is gappy -- try: headers, cursor creation,
# the per-row loop header, the null-row checks and several error exits
# are elided; comments below describe only what is visible.
181 # get upgrade directions
# upgrade_config[table_name] looks like "(new:old[:extra...], ...)";
# strip the parens and split into per-field directives.
182 table_def = upgrade_config[table_name].replace('(', '').replace(')', '').split(',')
183 table_fields, old_fields, joins, wheres = [], [], set(), set()
184 for field in table_def:
# Each directive is new_field:old_field[:join-or-where ...].
185 field_parts = field.strip().split(':')
186 table_fields.append(field_parts[0])
187 old_fields.append(field_parts[1])
# Extra parts containing '=' are WHERE conditions; the rest are joins.
189 joins.update(set(filter(lambda x: not x.find('=') > -1, field_parts[2:])))
190 wheres.update(set(filter(lambda x: x.find('=') > -1, field_parts[2:])))
192 # get indices of fields that cannot be null
193 (type, body_list) = schema[table_name]
194 not_null_indices = []
195 for field in table_fields:
196 for body_line in body_list:
# NOTE(review): plain substring match -- a field name that is a prefix of
# another column name could be flagged against the wrong line.
197 if body_line.find(field) > -1 and \
198 body_line.upper().find("NOT NULL") > -1:
199 not_null_indices.append(table_fields.index(field))
201 # get index of primary key
202 primary_key_indices = []
203 for body_line in body_list:
204 if body_line.find("PRIMARY KEY") > -1:
205 primary_key = body_line
206 for field in table_fields:
207 if primary_key.find(field) > -1:
208 primary_key_indices.append(table_fields.index(field))
# Build "SELECT DISTINCT <old fields> FROM <first old table>" then bolt
# on the collected join/where clauses.
212 get_old_data = "SELECT DISTINCT %s FROM %s" % \
213 (", ".join(old_fields), old_fields[0].split(".")[0])
# (loop header over `joins` elided; each join spec is "table.column")
215 get_old_data = get_old_data + " INNER JOIN %s USING (%s) " % \
216 (join.split('.')[0], join.split('.')[1])
218 get_old_data = get_old_data + " WHERE "
# (loop header over `wheres` elided; NOTE(review): multiple conditions
# would need an AND/OR separator -- confirm against the full source)
220 get_old_data = get_old_data + " %s" % where
221 cursor.execute(get_old_data)
222 rows = cursor.fetchall()
224 # write data to a temp file
225 temp_file_name = '%s/%s.tmp' % (temp_dir, table_name)
226 temp_file = open(temp_file_name, 'w')
# (per-row loop header elided)
228 # attempt to make any necessary fixes to data
229 row = fix_row(row, table_name, table_fields)
230 # do not attempt to write null rows
233 # do not attempt to write rows with null primary keys
234 if filter(lambda x: row[x] == None, primary_key_indices):
236 for i in range(len(row)):
237 # convert nulls into something pg can understand
239 if i in not_null_indices:
240 # XX doesnt work if column is int type
# Numeric values must be stringified before the "\t".join below.
244 if isinstance(row[i], int) or isinstance(row[i], float):
246 # escape whatever can mess up the data format
# COPY text format: literal tab/newline/CR must be backslash-escaped.
247 if isinstance(row[i], str):
248 row[i] = row[i].replace('\t', '\\t')
249 row[i] = row[i].replace('\n', '\\n')
250 row[i] = row[i].replace('\r', '\\r')
251 data_row = "\t".join(row)
252 temp_file.write(data_row + "\n")
# "\." is the end-of-data marker for COPY FROM stdin.
253 temp_file.write("\.\n")
255 temp_tables[table_name] = temp_file_name
258 #print "WARNING: cannot upgrade %s. upgrade def not found. skipping" % \
261 except IndexError, fault:
# A short directive (missing ':' parts) lands here.
262 print "Error: error found in upgrade config file. " \
263 "check %s configuration. Aborting " % \
267 print "Error: configuration for %s doesnt match db schema. " \
268 " Aborting" % (table_name)
276 # Connect to current db
# --- Main, part 1: validate db version, then ensure the db is utf8 ---
# NOTE(review): listing is gappy; cursor creation, sys.exit calls and
# several if/else headers are elided.
280 # determine current db version
# plc_db_version only exists in newer dbs; probe pg_class first.
282 cursor.execute("SELECT relname from pg_class where relname = 'plc_db_version'")
283 rows = cursor.fetchall()
285 print "Warning: current db has no version. Unable to validate config file."
287 cursor.execute("SELECT version FROM plc_db_version")
288 rows = cursor.fetchall()
289 if not rows or not rows[0]:
290 print "Warning: current db has no version. Unable to validate config file."
291 elif rows[0][0] == db_version_new:
# Already at the target version -- nothing to do.
292 print "Status: Versions are the same. No upgrade necessary."
294 elif not rows[0][0] == db_version_previous:
# NOTE(review): "Stauts" is a typo in the runtime message below (left
# as-is here since it is a program string, not a comment).
295 print "Stauts: DB_VERSION_PREVIOUS in config file (%s) does not" \
296 " match current db version %d" % (upgrade_config_file, rows[0][0])
299 print "STATUS: attempting upgrade from %d to %d" % \
300 (db_version_previous, db_version_new)

# Check the database encoding; pre-4 PlanetLab dbs were commonly not utf8.
303 sql = " SELECT pg_catalog.pg_encoding_to_char(d.encoding)" \
304 " FROM pg_catalog.pg_database d " \
305 " WHERE d.datname = '%s' " % config['PLC_DB_NAME']
307 rows = cursor.fetchall()
308 if rows[0][0] not in ['UTF8', 'UNICODE']:
309 print "WARNING: db encoding is not utf8. Attempting to encode"
# Recovery path: dump the db, re-encode the dump, archive the original,
# then reload the encoded dump into a fresh UTF8 database.
312 dump_file = '%s/dump.sql' % (temp_dir)
313 dump_file_encoded = dump_file + ".utf8"
314 dump_cmd = 'pg_dump -i %s -U postgres -f %s > /dev/null 2>&1' % \
315 (config['PLC_DB_NAME'], dump_file)
316 if os.system(dump_cmd):
317 print "ERROR: during db dump. Exiting."
319 # encode dump to utf8
320 print "Status: encoding database dump"
321 encode_utf8(dump_file, dump_file_encoded)
322 # archive original db
323 archive_db(config['PLC_DB_NAME'], config['PLC_DB_NAME']+'_sqlascii_archived')
324 # create a utf8 database and upload encoded data
325 recreate_cmd = 'createdb -U postgres -E UTF8 %s > /dev/null; ' \
326 'psql -a -U %s %s < %s > /dev/null 2>&1;' % \
327 (config['PLC_DB_NAME'], config['PLC_DB_USER'], \
328 config['PLC_DB_NAME'], dump_file_encoded)
329 print "Status: recreating database as utf8"
330 if os.system(recreate_cmd):
331 print "Error: database encoding failed. Aborting"
# Clean up the encoded dump on success.
334 os.remove(dump_file_encoded)
343 # parse the schema user wishes to upgrade to
# --- Main, part 2: single pass over the new schema file ---
# Records every CREATE'd object (name -> (type, body)) in `schema` and
# `schema_items_ordered`, notes serial columns in `sequences`, and collects
# INSERT statements for later.
# NOTE(review): the `index` initialisation/increments and several branch
# bodies are elided in this listing.
345 file = open(schema_file, 'r')
347 lines = file.readlines()
348 while index < len(lines):
350 # find all created objects
351 if line.startswith("CREATE"):
# "CREATE <TYPE> <name> ..." -- split on spaces.
352 line_parts = line.split(" ")
353 item_type = line_parts[1]
354 item_name = line_parts[2]
355 schema_items_ordered.append(item_name)
# Indexes fit on one line; store the line itself as the body.
356 if item_type in ['INDEX']:
357 schema[item_name] = (item_type, line)
359 # functions, tables, views span over multiple lines
360 # handle differently than indexes
361 elif item_type in ['AGGREGATE', 'TABLE', 'VIEW']:
# Accumulate lines until the statement-terminating ';'.
363 while index < len(lines):
365 nextline =lines[index]
366 # look for any sequences
# A 'serial' column implies an implicit <table>_<column>_seq sequence;
# remember the column name so setval can fix it after the COPY load.
367 if item_type in ['TABLE'] and nextline.find('serial') > -1:
368 sequences[item_name] = nextline.strip().split()[0]
369 fields.append(nextline)
370 if nextline.find(";") >= 0:
372 schema[item_name] = (item_type, fields)
374 print "Error: unknown type %s" % item_type
375 elif line.startswith("INSERT"):
382 print "Status: generating temp tables"
# --- Main, part 3: dump old data, archive, recreate, reload ---
# NOTE(review): listing is gappy; loop/condition headers, sys.exit calls
# and the db.close()/cursor teardown are elided.
383 # generate all temp tables
384 for key in schema_items_ordered:
385 (type, body_list) = schema[key]
# (an elided condition restricts this to TABLE objects)
387 generate_temp_table(key, db)

389 # disconnect from current database and archive it
393 print "Status: archiving database"
394 archive_db(config['PLC_DB_NAME'], config['PLC_DB_NAME']+'_archived')
# Recreate an empty UTF8 database under the original name.
395 os.system('createdb -U postgres -E UTF8 %s > /dev/null; ' % config['PLC_DB_NAME'])

397 print "Status: upgrading database"
398 # attempt to create and load all items from schema into temp db
# For each object, in schema order: create it, then bulk-load its saved
# temp data via COPY, fix its sequence, or fall back to the schema's own
# INSERT statements.
400 for key in schema_items_ordered:
401 (type, body_list) = schema[key]
402 create_item_from_schema(key)
404 if upgrade_config.has_key(key):
405 # attempt to populate with temp table data
# Re-derive the new-column list from the same directive string that
# generate_temp_table used, so COPY's column order matches the file.
406 table_def = upgrade_config[key].replace('(', '').replace(')', '').split(',')
407 table_fields = [field.strip().split(':')[0] for field in table_def]
408 insert_cmd = "psql %s %s -c " \
409 " 'COPY %s (%s) FROM stdin;' < %s " % \
410 (config['PLC_DB_NAME'], config['PLC_DB_USER'], key,
411 ", ".join(table_fields), temp_tables[key] )
412 exit_status = os.system(insert_cmd)
414 print "Error: upgrade %s failed" % key
416 # update the primary key sequence
# Without setval the sequence would restart at 1 and collide with the
# copied rows on the next insert.
417 if sequences.has_key(key):
418 sequence = key +"_"+ sequences[key] +"_seq"
419 update_seq = "psql %s %s -c " \
420 " \"select setval('%s', max(%s)) FROM %s;\" > /dev/null" % \
421 (config['PLC_DB_NAME'], config['PLC_DB_USER'], sequence,
423 exit_status = os.system(update_seq)
425 print "Error: sequence %s update failed" % sequence
428 # check if there are any insert stmts in schema for this table
429 print "Warning: %s has no temp data file. Unable to populate with old data" % key
# Substring match against the collected INSERT statements.
430 for insert_stmt in inserts:
431 if insert_stmt.find(key) > -1:
432 insert_cmd = 'psql %s postgres -qc "%s;" > /dev/null 2>&1' % \
433 (config['PLC_DB_NAME'], insert_stmt)
434 os.system(insert_cmd)
# On any failure above: restore the archived original database and abort.
436 print "Error: failed to populate db. Unarchiving original database and aborting"
437 undo_command = "dropdb -U postgres %s > /dev/null; psql template1 postgres -qc" \
438 " 'ALTER DATABASE %s RENAME TO %s;'; > /dev/null" % \
439 (config['PLC_DB_NAME'], config['PLC_DB_NAME']+'_archived', config['PLC_DB_NAME'])
440 os.system(undo_command)

446 print "upgrade complete"