/**
 * Copyright 2009 ATG DUST Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and limitations under the License.
 */
package atg.adapter.gsa;

import java.sql.Connection;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

import org.apache.ddlutils.DatabaseOperationException;
import org.apache.ddlutils.Platform;
import org.apache.ddlutils.PlatformFactory;
import org.apache.ddlutils.model.Column;
import org.apache.ddlutils.model.Database;
import org.apache.ddlutils.model.ForeignKey;
import org.apache.ddlutils.model.IndexColumn;
import org.apache.ddlutils.model.Reference;
import org.apache.ddlutils.model.UniqueIndex;

import atg.repository.RepositoryException;

/**
 * This class is used to create, drop, and alter the database schema required
 * for a given repository. It uses the Apache DDLUtils tools for the actual
 * schema manipulation. To use the class, first initialize its "model" by
 * passing a GSARepository to the constructor. Afterwards you may invoke the
 * action methods such as:
 * <ul>
 * <li>createSchema - Creates the schema, including constraints.
 * <li>dropSchema - Drops the schema, including constraints.
 * <li>alterSchema - Attempts to alter an existing schema into the one
 * currently required for the given repository.
 * </ul>
 * These methods affect the DataSource used by the given GSARepository. If that
 * DataSource is not accessible, these methods will fail. Schema modification
 * may either continue or fail on error; set the <code>strict</code> property
 * to true to always fail on error. The default is to continue on error.
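 * <p>
 * A minimal usage sketch is shown below. How the GSARepository is obtained
 * (the <code>getTestRepository()</code> call) is hypothetical and depends on
 * the surrounding test setup, and the flag values are only an example:
 *
 * <pre>
 * GSARepository repository = getTestRepository(); // however your test resolves it
 * GSARepositorySchemaGenerator generator =
 *     new GSARepositorySchemaGenerator(repository);
 * // Drop any existing tables first and continue past individual errors.
 * generator.createSchema(true, true);
 * // Later, remove the schema again, continuing on error.
 * generator.dropSchema(true);
 * </pre>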
 *
 * @version $Id: //test/UnitTests/base/main/src/Java/atg/adapter/gsa/
 *          GSARepositorySchemaGenerator.java#1 $
 * @author adamb
 */
public class GSARepositorySchemaGenerator {

  // The repository upon which we are working.
  public GSARepository mRepository = null;
  // The DDLUtils Platform object
  public Platform mPlatform = null;
  // The DDLUtils Database Model
  public Database mDatabase = null;
  // Tool for mapping database types
  public DatabaseTypeNameToJDBC mDatabaseTypeNameToJDBC = null;

  // -----------------------------
  /**
   * Creates a new GSARepositorySchemaGenerator and initializes it with a model
   * based upon the given repository.
   *
   * @param pRepository the repository whose schema should be modeled
   * @throws RepositoryException if the repository's item descriptors cannot be read
   */
  public GSARepositorySchemaGenerator(GSARepository pRepository)
      throws RepositoryException {
    buildModel(pRepository);
  }

  // -----------------------------
  /**
   * Initializes this class with a model for the given repository. Any previous
   * model is discarded.
   *
   * @param pRepository the repository whose schema should be modeled
   * @throws RepositoryException if the repository's item descriptors cannot be read
   */
  public void buildModel(GSARepository pRepository) throws RepositoryException {
    mDatabaseTypeNameToJDBC = new DatabaseTypeNameToJDBC(pRepository
        .getDatabaseTableInfo());
    mRepository = pRepository;
    mPlatform = PlatformFactory.createNewPlatformInstance(pRepository
        .getDataSource());
    mDatabase = new Database();
    mDatabase.setName(pRepository.getAbsoluteName());
    mDatabase.setVersion("1.0");
    String[] names = pRepository.getItemDescriptorNames();
    for (String name : names) {
      GSAItemDescriptor desc = (GSAItemDescriptor) pRepository
          .getItemDescriptor(name);
      Table[] tables = desc.getTables();
      // first do primary tables
      processTables(pRepository, tables, true);
    }
    for (String name : names) {
      GSAItemDescriptor desc = (GSAItemDescriptor) pRepository
          .getItemDescriptor(name);
      Table[] tables = desc.getTables();
      // then do the rest
      processTables(pRepository, tables, false);
    }
  }

  /**
   * Walks the tables of this repository building up a DDLUtils model.
   *
   * @param pRepository the repository being modeled
   * @param tables the tables of a single item descriptor
   * @param pPrimary
   *          - if true, only primary tables are processed; otherwise only
   *          non-primary tables are processed
   * @throws RepositoryException if a table's columns cannot be collected
   */
  private void processTables(GSARepository pRepository, Table[] tables,
      boolean pPrimary) throws RepositoryException {
    for (Table table : tables) {
      if (!table.isInherited() && (table.isPrimaryTable() == pPrimary)) {
        // Track tables here. If multiple repositories use the same table we
        // don't want to create it twice; in practice the more common case is a
        // single repository that reuses one table for multiple purposes.

        List<GSARepository> repositoriesUsingTable = SchemaTracker
            .getSchemaTracker().getTableToRepository().get(table.getName());
        if (repositoriesUsingTable != null
            && repositoriesUsingTable.contains(pRepository)) {
          pRepository.logWarning("Table " + table.getName()
              + " already defined by repository "
              + repositoriesUsingTable.toString()
              + " skipping schema creation for this table. multi="
              + table.isMultiTable() + " auxiliary=" + table.isAuxiliaryTable()
              + " primary=" + table.isPrimaryTable());
        } else {
          // Only add to the model if we have never seen this table created
          buildSingleTableModel(mDatabase, table, pRepository);
          if (repositoriesUsingTable == null) {
            repositoriesUsingTable = new ArrayList<GSARepository>();
          }
          repositoriesUsingTable.add(pRepository);
        }
        SchemaTracker.getSchemaTracker().getTableToRepository().put(
            table.getName(), repositoriesUsingTable);
      }
    }
  }

  // -----------------------------
  /**
   * Adds the definition of the given table to the current DDLUtils database
   * model.
   *
   * @param pDb the DDLUtils database model being built
   * @param pTable the GSA table to add to the model
   * @param pRepository the repository that owns the table
   * @throws RepositoryException if the table's columns cannot be collected
   */
  void buildSingleTableModel(Database pDb, Table pTable,
      GSARepository pRepository) throws RepositoryException {
    TableColumns columns = new TableColumns(pTable, pRepository
        .getDatabaseTableInfo());
    pTable.collectColumnsForName(columns);
    AccessibleTableColumns atable = new AccessibleTableColumns(columns);

    // --------------------------
    // Table Definition
    // --------------------------
    org.apache.ddlutils.model.Table t = new org.apache.ddlutils.model.Table();
    t.setName(pTable.getName());
    pDb.addTable(t);

    // --------------------------
    // Add Columns
    // --------------------------

    ColumnDefinitionNode columnDefinition = null;
    boolean proceed = false;

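    // Walk the singly linked list of ColumnDefinitionNode entries gathered by
    // collectColumnsForName(), from the head of the AccessibleTableColumns to
    // its tail, adding one DDLUtils Column per node.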
    for (columnDefinition = atable.getHead(), proceed = true; columnDefinition != null
        && proceed; columnDefinition = columnDefinition.mNext) {
      // No need to iterate the next time if there is just one element in the
      // linked list
      if (atable.getHead() == atable.getTail())
        proceed = false;

      Column c = new Column();

      // --------------------------
      // Column Name
      // --------------------------

      c.setName(columnDefinition.mColumnName);
      t.addColumn(c);

      // --------------------------
      // Column Type
      // --------------------------

      setupColumnType(pRepository, columnDefinition, c);

      // --------------------------
      // Primary Key
      // --------------------------

      if (atable.getPrimaryKeys().contains(c.getName())
          || c.getName().equals(atable.getMultiColumnName())) {
        c.setPrimaryKey(true);
      }

      // --------------------------
      // Null/NotNull
      // --------------------------

      if (columnDefinition.mIsRequired
          || atable.getPrimaryKeys().contains(columnDefinition.mColumnName)) {
        c.setRequired(true);
      } else {
        c.setRequired(false);
      }

      // --------------------------
      // Unique Index
      // DDLUtils doesn't yet support UNIQUE constraints, so model uniqueness
      // with a unique index instead.
      // --------------------------

      if (columnDefinition.mIsUnique) {
        UniqueIndex uniqueIndex = new UniqueIndex();
        uniqueIndex.setName("uidx_" + t.getName() + "_" + c.getName());
        uniqueIndex.addColumn(new IndexColumn(c));
        t.addIndex(uniqueIndex);
      }

      // --------------------------
      // References Constraint
      // --------------------------

      if (columnDefinition.mReferenced != null && !columns.mVersioned) {

        ForeignKey foreignKey = new ForeignKey();
        Reference reference = new Reference();
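        // mReferenced holds a "referenced_table(referenced_column)" style
        // string (for example "some_table(some_id)" -- a made-up name, purely
        // illustrative); split it into its table and column parts.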
        String referencedTableName = columnDefinition.mReferenced.substring(0,
            columnDefinition.mReferenced.indexOf("("));
        String referencedColumnName = columnDefinition.mReferenced.substring(
            columnDefinition.mReferenced.indexOf("(") + 1,
            columnDefinition.mReferenced.indexOf(")"));
        org.apache.ddlutils.model.Table referencedTable = pDb
            .findTable(referencedTableName);
        foreignKey.setName(t.getName() + c.getName() + "FK"
            + referencedTableName + referencedColumnName);
        if (referencedTable != null) {
          Column referencedColumn = referencedTable
              .findColumn(referencedColumnName);
          if (referencedTable.getName().equals(t.getName())
              && referencedColumn.getName().equals(c.getName())) {
            pRepository
                .logWarning("Skipping foreign key constraint, table and column are the same. Table.Column="
                    + referencedTableName + "." + referencedColumnName);
          } else {
            reference.setForeignColumn(referencedColumn);
            reference.setLocalColumn(c);
            foreignKey.addReference(reference);
            foreignKey.setForeignTable(referencedTable);
            t.addForeignKey(foreignKey);
          }
        }

        // --------------------------
        // Foreign Keys
        // --------------------------

        if (atable.getForeignKeys() != null && !columns.mVersioned) {
          // TODO: Add ForeignKeys
        }
      }
    }

  }

  /**
   * Determines the appropriate JDBC type for the given ColumnDefinitionNode and
   * sets it on Column "c".
   *
   * @param pRepository the repository being modeled
   * @param columnDefinition the column definition being converted
   * @param c the DDLUtils column to configure
   */
  void setupColumnType(GSARepository pRepository,
      ColumnDefinitionNode columnDefinition, Column c) {
    c.setDescription(columnDefinition.mDataTypeString);
    String typeName = null;
    String size = null;
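    // mDataTypeString carries the raw SQL type declaration, of the shape
    // "typename", "typename(size)" or "typename(precision,scale)" (for example
    // "varchar(40)" -- an illustrative value, not taken from any repository
    // definition). Split the name from the parenthesized size.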
    if (columnDefinition.mDataTypeString.contains("(")) {
      typeName = columnDefinition.mDataTypeString.substring(0,
          columnDefinition.mDataTypeString.indexOf("("));
      size = columnDefinition.mDataTypeString.substring(
          columnDefinition.mDataTypeString.indexOf("(") + 1,
          columnDefinition.mDataTypeString.indexOf(")"));
    } else {
      typeName = columnDefinition.mDataTypeString;
    }

    String precision = null;
    String scale = null;
    if (size != null) {
      if (size.contains(",")) {
        precision = size.substring(0, size.indexOf(","));
        scale = size.substring(size.indexOf(",") + 1, size.length());
        c.setPrecisionRadix(Integer.parseInt(precision.trim()));
        c.setScale(Integer.parseInt(scale.trim()));
      } else {
        c.setSize(size);
      }
    }

    c.setTypeCode(mDatabaseTypeNameToJDBC.databaseTypeNametoJDBCType(typeName));
  }

  // -----------------------------
  /**
   * Creates the schema based on the current model. If no model has been
   * created, this method throws a NoModelException.
   *
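   * <p>
   * For example, on an initialized generator (variable name illustrative),
   * dropping any existing tables first and continuing past individual errors:
   *
   * <pre>
   * generator.createSchema(true, true);
   * </pre>
   *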
   * @param pContinueOnError
   *          - If true, continue on error, else fail.
   * @param pDrop
   *          - If true, drops schema first before attempting to create it.
   * @throws SQLException
   * @throws DatabaseOperationException
   */
  public void createSchema(final boolean pContinueOnError, final boolean pDrop)
      throws DatabaseOperationException, SQLException {
    boolean success = new DoInAutoCommit(this, mRepository)
        .doInAutoCommit(new AutoCommitable() {
          @Override
          public void doInAutoCommit(Connection pConnection) {
            mPlatform.createTables(pConnection, mDatabase, pDrop,
                pContinueOnError);
          }
        });
    if (!success)
      throw new DatabaseOperationException("Failed to create tables.");
  }

  // -----------------------------
  /**
   * Drops the schema based on the current model. If no model has been created,
   * this method throws a NoModelException.
   *
   * @param pContinueOnError
   *          - If true, continue on error, else fail.
   * @throws SQLException
   * @throws DatabaseOperationException
   */
  public void dropSchema(final boolean pContinueOnError)
      throws DatabaseOperationException, SQLException {
    boolean success = new DoInAutoCommit(this, mRepository)
        .doInAutoCommit(new AutoCommitable() {
          @Override
          public void doInAutoCommit(Connection pConnection) {
            mPlatform.dropTables(pConnection, mDatabase, pContinueOnError);
          }
        });
    if (!success)
      throw new DatabaseOperationException("Failed to drop tables.");
  }

  // -----------------------------
  /**
   * Alters the schema based on the current model. If no model has been created,
   * this method throws a NoModelException. This method attempts to preserve the
   * data in the target database.
   *
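   * <p>
   * For example, on an initialized generator (variable name illustrative),
   * migrating an existing schema in place while continuing past individual
   * errors:
   *
   * <pre>
   * generator.alterSchema(true);
   * </pre>
   *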
   * @param pContinueOnError
   *          - If true, continue on error, else fail.
   * @throws SQLException
   * @throws DatabaseOperationException
   */
  public void alterSchema(final boolean pContinueOnError)
      throws DatabaseOperationException, SQLException {
    boolean success = new DoInAutoCommit(this, mRepository)
        .doInAutoCommit(new AutoCommitable() {
          @Override
          public void doInAutoCommit(Connection pConnection) {
            mPlatform.alterTables(pConnection, mDatabase, pContinueOnError);
          }
        });
    if (!success)
      throw new DatabaseOperationException("Failed to alter tables.");
  }
}