Project Clover database - Tue Dec 20 2016 21:24:09 CET
Package com.xpn.xwiki.store.migration.hibernate

File R40000XWIKI6990DataMigration.java

 

Coverage histogram

83% of files have more coverage

Code metrics

Branches: 182
Statements: 531
Methods: 64
Classes: 7
LOC: 1,877
NCLOC: 1,239
Complexity: 176
Complexity density: 0.33
Average statements per method: 8.3
Average methods per class: 9.14
Average method complexity: 2.75

Classes

Class                                                                     Line #   Statements   Complexity   Elements   Coverage
R40000XWIKI6990DataMigration                                                 102          449          146        656       0.3%
R40000XWIKI6990DataMigration.StatsIdComputer                                 142            5            3          8         0%
R40000XWIKI6990DataMigration.IdConversionHibernateCallback                   180            0            0          0          -
R40000XWIKI6990DataMigration.AbstractUpdateHibernateCallback                 200            5            1          6         0%
R40000XWIKI6990DataMigration.AbstractIdConversionHibernateCallback           233           14            6         20         0%
R40000XWIKI6990DataMigration.AbstractBulkIdConversionHibernateCallback       332           58           20         85         0%
R40000XWIKI6990DataMigration.CustomMappingCallback                           553            0            0          0          -
 

Contributing tests

No tests hitting this source file were found.

Source view

1    /*
2    * See the NOTICE file distributed with this work for additional
3    * information regarding copyright ownership.
4    *
5    * This is free software; you can redistribute it and/or modify it
6    * under the terms of the GNU Lesser General Public License as
7    * published by the Free Software Foundation; either version 2.1 of
8    * the License, or (at your option) any later version.
9    *
10    * This software is distributed in the hope that it will be useful,
11    * but WITHOUT ANY WARRANTY; without even the implied warranty of
12    * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
13    * Lesser General Public License for more details.
14    *
15    * You should have received a copy of the GNU Lesser General Public
16    * License along with this software; if not, write to the Free
17    * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
18    * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
19    */
20   
21    package com.xpn.xwiki.store.migration.hibernate;
22   
23    import java.io.StringReader;
24    import java.util.ArrayList;
25    import java.util.HashMap;
26    import java.util.HashSet;
27    import java.util.Iterator;
28    import java.util.LinkedList;
29    import java.util.List;
30    import java.util.Map;
31    import java.util.Queue;
32    import java.util.Set;
33   
34    import javax.inject.Inject;
35    import javax.inject.Named;
36    import javax.inject.Singleton;
37   
38    import org.apache.commons.lang3.StringUtils;
39    import org.dom4j.Element;
40    import org.dom4j.io.SAXReader;
41    import org.hibernate.HibernateException;
42    import org.hibernate.Query;
43    import org.hibernate.Session;
44    import org.hibernate.cfg.Configuration;
45    import org.hibernate.dialect.Dialect;
46    import org.hibernate.engine.Mapping;
47    import org.hibernate.mapping.Column;
48    import org.hibernate.mapping.ForeignKey;
49    import org.hibernate.mapping.Index;
50    import org.hibernate.mapping.PersistentClass;
51    import org.hibernate.mapping.PrimaryKey;
52    import org.hibernate.mapping.Property;
53    import org.hibernate.mapping.Table;
54    import org.slf4j.Logger;
55    import org.xwiki.component.annotation.Component;
56    import org.xwiki.logging.LoggerManager;
57    import org.xwiki.model.reference.DocumentReference;
58    import org.xwiki.model.reference.DocumentReferenceResolver;
59    import org.xwiki.model.reference.EntityReferenceSerializer;
60   
61    import com.xpn.xwiki.XWikiContext;
62    import com.xpn.xwiki.XWikiException;
63    import com.xpn.xwiki.doc.DeletedAttachment;
64    import com.xpn.xwiki.doc.XWikiAttachment;
65    import com.xpn.xwiki.doc.XWikiDocument;
66    import com.xpn.xwiki.doc.XWikiLink;
67    import com.xpn.xwiki.doc.rcs.XWikiRCSNodeInfo;
68    import com.xpn.xwiki.objects.BaseObject;
69    import com.xpn.xwiki.objects.BaseObjectReference;
70    import com.xpn.xwiki.objects.BaseProperty;
71    import com.xpn.xwiki.objects.DBStringListProperty;
72    import com.xpn.xwiki.objects.DateProperty;
73    import com.xpn.xwiki.objects.DoubleProperty;
74    import com.xpn.xwiki.objects.FloatProperty;
75    import com.xpn.xwiki.objects.IntegerProperty;
76    import com.xpn.xwiki.objects.LargeStringProperty;
77    import com.xpn.xwiki.objects.LongProperty;
78    import com.xpn.xwiki.objects.StringListProperty;
79    import com.xpn.xwiki.objects.StringProperty;
80    import com.xpn.xwiki.stats.impl.DocumentStats;
81    import com.xpn.xwiki.stats.impl.RefererStats;
82    import com.xpn.xwiki.stats.impl.VisitStats;
83    import com.xpn.xwiki.stats.impl.XWikiStats;
84    import com.xpn.xwiki.store.DatabaseProduct;
85    import com.xpn.xwiki.store.XWikiHibernateBaseStore;
86    import com.xpn.xwiki.store.XWikiHibernateBaseStore.HibernateCallback;
87    import com.xpn.xwiki.store.XWikiHibernateStore;
88    import com.xpn.xwiki.store.migration.DataMigrationException;
89    import com.xpn.xwiki.store.migration.XWikiDBVersion;
90    import com.xpn.xwiki.util.Util;
91   
92    /**
93    * Migration for XWIKI-6990: Reduce the likelihood of having the same (hibernate) document id for different documents. This
94    * data migration converts document IDs to a new hash algorithm.
95    *
96    * @version $Id: 4a0bdb2287d42b825dc6168438ea7826c870acac $
97    * @since 4.0M1
98    */
99    @Component
100    @Named("R40000XWIKI6990")
101    @Singleton
 
102    public class R40000XWIKI6990DataMigration extends AbstractHibernateDataMigration
103    {
104    /** Document classes to migrate, using the document id in the first column of their key. */
105    private static final Class<?>[] DOC_CLASSES = new Class<?>[] {
106    XWikiDocument.class,
107    XWikiRCSNodeInfo.class,
108    XWikiLink.class
109    };
110   
111    /** Document related classes to migrate, using a property docId without FK information. */
112    private static final Class<?>[] DOCLINK_CLASSES = new Class<?>[] {
113    XWikiAttachment.class,
114    DeletedAttachment.class
115    };
116   
117    /** Property classes to migrate, using the object id in the first column of their key. */
118    private static final Class<?>[] PROPERTY_CLASS = new Class<?>[] {
119    DateProperty.class,
120    DBStringListProperty.class,
121    DoubleProperty.class,
122    FloatProperty.class,
123    IntegerProperty.class,
124    LargeStringProperty.class,
125    LongProperty.class,
126    StringListProperty.class,
127    StringProperty.class,
128    BaseProperty.class
129    };
130   
131    /** Statistics classes to migrate. (ID is the stats identifier) */
132    private static final Class<?>[] STATS_CLASSES = new Class<?>[] {
133    DocumentStats.class,
134    RefererStats.class,
135    VisitStats.class
136    };
137   
138    /** Mark internal mapping. */
139    private static final String INTERNAL = "internal";
140   
141    /** Stub statistic class used to compute new ids from existing objects. */
 
142    private static class StatsIdComputer extends XWikiStats
143    {
144    /** Name of the statistic. */
145    private String name;
146   
147    /** Number of the statistic. */
148    private int number;
149   
150    /**
151    * Return the new identifier for a statistic having given name and number.
152    *
153    * @param name the name of the statistic
154    * @param number the number of the statistic
155    * @return the hash to use for the new id of the statistic
156    */
 
157  0 toggle public long getId(String name, int number)
158    {
159  0 this.name = name;
160  0 this.number = number;
161  0 return super.getId();
162    }
163   
 
164  0 toggle @Override
165    public String getName()
166    {
167  0 return this.name;
168    }
169   
 
170  0 toggle @Override
171    public int getNumber()
172    {
173  0 return this.number;
174    }
175    }
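// Editor's illustration (not part of the migrated file): the stub delegates the hash
// computation to XWikiStats.getId(), which reads the overridden getName()/getNumber(),
// so a replacement id for an existing row would be obtained along these lines:
//     StatsIdComputer computer = new StatsIdComputer();
//     long newStatsId = computer.getId("Main.WebHome", 0); // name and number copied from the old record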
176   
177    /**
178    * Specialized HibernateCallback for id conversion.
179    */
 
180    private interface IdConversionHibernateCallback extends HibernateCallback<Object>
181    {
182    /**
183    * Set the new identifier.
184    *
185    * @param newId the new identifier
186    */
187    void setNewId(long newId);
188   
189    /**
190    * Set the old identifier.
191    *
192    * @param oldId the old identifier
193    */
194    void setOldId(long oldId);
195    }
196   
197    /**
198    * Base class for hibernate callback to convert identifier.
199    */
 
200    private abstract static class AbstractUpdateHibernateCallback implements HibernateCallback<Object>
201    {
202    /** Place holder for new id. */
203    protected static final String NEWID = "newid";
204   
205    /** Place holder for old id. */
206    protected static final String OLDID = "oldid";
207   
208    /** The current hibernate session. */
209    protected Session session;
210   
211    /** The current timer. */
212    public int timer;
213   
 
214  0 toggle @Override
215    public Object doInHibernate(Session session)
216    {
217  0 this.timer = 0;
218  0 this.session = session;
219  0 doUpdate();
220  0 this.session = null;
221  0 return null;
222    }
223   
224    /**
225    * Implement this method to execute an update.
226    */
227    public abstract void doUpdate();
228    }
229   
230    /**
231    * Base implementation of the hibernate callback to convert identifier using individual updates (safe-mode).
232    */
 
233    private abstract static class AbstractIdConversionHibernateCallback
234    extends AbstractUpdateHibernateCallback implements IdConversionHibernateCallback
235    {
236    /** Name for the id column. */
237    public static final String ID = "id";
238   
239    /** Name for the subid column. */
240    public static final String IDID = "id.id";
241   
242    /** Name for the docid column. */
243    public static final String DOCID = "docId";
244   
245    /** The old identifier. */
246    private long oldId;
247   
248    /** The new identifier. */
249    private long newId;
250   
 
251  0 toggle @Override
252    public void setNewId(long newId)
253    {
254  0 this.newId = newId;
255    }
256   
 
257  0 toggle @Override
258    public void setOldId(long oldId)
259    {
260  0 this.oldId = oldId;
261    }
262   
 
263  0 toggle @Override
264    public void doUpdate()
265    {
266  0 doSingleUpdate();
267    }
268   
269    /**
270    * Implement this method to execute a single ID update using {@code executeIdUpdate()}.
271    */
272    public abstract void doSingleUpdate();
273   
274    /**
275    * Update object id in a given field for a given object class.
276    *
277    * @param klass the class of the persisted object
278    * @param field the field name of the persisted object
279    * @return the time elapsed during the operation
280    */
 
281  0 toggle public long executeIdUpdate(Class<?> klass, String field)
282    {
283  0 return executeIdUpdate(klass.getName(), field);
284    }
285   
286    /**
287    * Update object id in a given field of a given table.
288    *
289    * @param name the entity name of the table
290    * @param field the field name of the column
291    * @return the time elapsed during the operation
292    */
 
293  0 toggle public long executeIdUpdate(String name, String field)
294    {
295  0 StringBuilder sb = new StringBuilder(128);
296  0 sb.append("update ").append(name)
297    .append(" klass set klass.").append(field).append('=').append(':').append(NEWID)
298    .append(" where klass.").append(field).append('=').append(':').append(OLDID);
299  0 long now = System.nanoTime();
300  0 this.session.createQuery(sb.toString())
301    .setLong(NEWID, this.newId)
302    .setLong(OLDID, this.oldId)
303    .executeUpdate();
304  0 return System.nanoTime() - now;
305    }
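// Editor's note (illustrative, not in the original file): a call such as
//     executeIdUpdate(XWikiAttachment.class, "docId")
// makes the builder above produce the HQL statement
//     update com.xpn.xwiki.doc.XWikiAttachment klass set klass.docId=:newid where klass.docId=:oldid
// which Hibernate runs with :newid and :oldid bound to the pair passed through setNewId()/setOldId().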
306   
307    /**
308    * Update object id in a given native field of a given native table.
309    *
310    * @param name the native name of the table
311    * @param field the native name of the column
312    * @return the time elapsed during the operation
313    */
 
314  0 toggle public long executeSqlIdUpdate(String name, String field)
315    {
316  0 StringBuilder sb = new StringBuilder(128);
317  0 sb.append("UPDATE ").append(name)
318    .append(" SET ").append(field).append('=').append(':').append(NEWID)
319    .append(" WHERE ").append(field).append('=').append(':').append(OLDID);
320  0 long now = System.nanoTime();
321  0 this.session.createSQLQuery(sb.toString())
322    .setLong(NEWID, this.newId)
323    .setLong(OLDID, this.oldId)
324    .executeUpdate();
325  0 return System.nanoTime() - now;
326    }
327    }
328   
329    /**
330    * Base implementation of the hibernate callback to convert identifier using bulk updates.
331    */
 
332    private abstract class AbstractBulkIdConversionHibernateCallback
333    extends AbstractUpdateHibernateCallback
334    {
335    /** Name for the temporary entity name. */
336    private static final String TEMPENTITY = "XWikiIdMigration";
337   
338    /** Name for the temporary table. */
339    private static final String TEMPTABLE = "xwikiidmigration";
340   
341    /** Name for the old id column. */
342    private static final String OLDIDCOL = "XWM_OLDID";
343   
344    /** Name for the new id column. */
345    private static final String NEWIDCOL = "XWM_NEWID";
346   
347    /** Insert statement. */
348    private String insertStatement;
349   
 
350  0 toggle @Override
351    public void doUpdate()
352    {
353  0 prepareInsertStatement();
354   
355  0 createTemporaryTable();
356   
357  0 doBulkIdUpdate();
358   
359  0 dropTemporaryTable();
360    }
361   
362    /**
363    * @return a Configuration containing the entity for the temporary table.
364    */
 
365  0 toggle private Configuration getTempTableMapping()
366    {
367  0 Configuration hibconfig = new Configuration();
368  0 hibconfig.addXML(makeTempTableMapping());
369  0 hibconfig.buildMappings();
370  0 return hibconfig;
371    }
372   
373    /**
374    * @return an XML description of the temporary table for hibernate.
375    */
 
376  0 toggle private String makeTempTableMapping()
377    {
378  0 StringBuilder sb = new StringBuilder(2000);
379   
380  0 sb.append("<?xml version=\"1.0\"?>\n" + "<!DOCTYPE hibernate-mapping PUBLIC\n")
381    .append("\t\"-//Hibernate/Hibernate Mapping DTD//EN\"\n")
382    .append("\t\"http://www.hibernate.org/dtd/hibernate-mapping-3.0.dtd\">\n")
383    .append("<hibernate-mapping>")
384    .append("<class entity-name=\"").append(TEMPENTITY)
385    .append("\" table=\"").append(TEMPTABLE).append("\">\n")
386    .append(" <id name=\"").append(OLDID).append("\" type=\"long\" unsaved-value=\"any\">\n")
387    .append(" <column name=\"").append(OLDIDCOL).append("\" not-null=\"true\" ")
388  0 .append((R40000XWIKI6990DataMigration.this.isOracle) ? "sql-type=\"integer\" " : "")
389    .append("/>\n <generator class=\"assigned\" />\n")
390    .append(" </id>\n")
391    .append("<property name=\"").append(NEWID).append("\" type=\"long\"");
392   
393  0 if (R40000XWIKI6990DataMigration.this.isOracle) {
394  0 sb.append(">\n")
395    .append("<column name=\"").append(NEWIDCOL).append("\" sql-type=\"integer\" />\n")
396    .append("</property>\n");
397    } else {
398  0 sb.append(" column=\"").append(NEWIDCOL).append("\" not-null=\"true\" />\n");
399    }
400   
401  0 sb.append("</class>\n</hibernate-mapping>");
402   
403  0 return sb.toString();
404    }
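// Editor's illustration (not part of the file): on a non-Oracle database the builder
// above emits a mapping along these lines:
//     <hibernate-mapping>
//       <class entity-name="XWikiIdMigration" table="xwikiidmigration">
//         <id name="oldid" type="long" unsaved-value="any">
//           <column name="XWM_OLDID" not-null="true" />
//           <generator class="assigned" />
//         </id>
//         <property name="newid" type="long" column="XWM_NEWID" not-null="true" />
//       </class>
//     </hibernate-mapping>
// On Oracle both columns are additionally forced to sql-type="integer".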
405   
406    /**
407    * Create the temporary table using hibernate to obtain the creation string.
408    */
 
409  0 toggle private void createTemporaryTable()
410    {
411  0 Configuration tempConfig = getTempTableMapping();
412  0 Mapping mapping = tempConfig.buildMapping();
413  0 PersistentClass pClass = tempConfig.getClassMapping(TEMPENTITY);
414  0 if (!R40000XWIKI6990DataMigration.this.logger.isDebugEnabled()) {
415  0 R40000XWIKI6990DataMigration.this.loggerManager.pushLogListener(null);
416    }
417  0 try {
418  0 this.session.createSQLQuery(
419    pClass.getTable().sqlTemporaryTableCreateString(R40000XWIKI6990DataMigration.this.dialect,
420    mapping))
421    .executeUpdate();
422    } catch (Throwable t) {
423  0 R40000XWIKI6990DataMigration.this.logger.debug("unable to create temporary id migration table [{}]",
424    t.getMessage());
425    } finally {
426  0 if (!R40000XWIKI6990DataMigration.this.logger.isDebugEnabled()) {
427  0 R40000XWIKI6990DataMigration.this.loggerManager.popLogListener();
428    }
429    }
430    }
431   
432    /**
433    * Delete all rows from the temporary table (it may have been reused), and drop the table if the dialect requires it.
434    */
 
435  0 toggle private void dropTemporaryTable()
436    {
437  0 StringBuilder sb = new StringBuilder(128);
438  0 sb = new StringBuilder(128);
439   
440  0 sb.append("DELETE FROM ").append(TEMPTABLE);
441  0 this.session.createSQLQuery(sb.toString()).executeUpdate();
442   
443  0 if (R40000XWIKI6990DataMigration.this.dialect.dropTemporaryTableAfterUse()) {
444  0 sb = new StringBuilder(128);
445  0 sb.append("DROP TABLE ").append(TEMPTABLE);
446   
447  0 if (!R40000XWIKI6990DataMigration.this.logger.isDebugEnabled()) {
448  0 R40000XWIKI6990DataMigration.this.loggerManager.pushLogListener(null);
449    }
450  0 try {
451  0 this.session.createSQLQuery(sb.toString()).executeUpdate();
452    } catch (Throwable t) {
453  0 R40000XWIKI6990DataMigration.this.logger.debug("unable to drop temporary id migration table [{}]",
454    t.getMessage());
455    } finally {
456  0 if (!R40000XWIKI6990DataMigration.this.logger.isDebugEnabled()) {
457  0 R40000XWIKI6990DataMigration.this.loggerManager.popLogListener();
458    }
459    }
460    }
461    }
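// Editor's note (illustrative): the method above issues
//     DELETE FROM xwikiidmigration
// followed, when the dialect drops temporary tables after use, by
//     DROP TABLE xwikiidmigration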
462   
463    /**
464    * Build the insert statement into the temporary table and store it for future use.
465    */
 
466  0 toggle private void prepareInsertStatement()
467    {
468  0 StringBuilder sb = new StringBuilder(128);
469  0 sb.append("INSERT INTO ").append(TEMPTABLE)
470    .append(" (").append(OLDIDCOL).append(',').append(NEWIDCOL).append(')')
471    .append(" VALUES (:").append(OLDID).append(",:").append(NEWID).append(')');
472   
473  0 this.insertStatement = sb.toString();
474    }
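// Editor's note (illustrative): with the constants above the prepared statement is
//     INSERT INTO xwikiidmigration (XWM_OLDID,XWM_NEWID) VALUES (:oldid,:newid)
// and insertIdUpdates() below binds one old/new id pair per execution.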
475   
476    /**
477    * Implement this method to execute a bulk ID update using {@code executeSqlIdUpdate()}.
478    */
479    public abstract void doBulkIdUpdate();
480   
481    /**
482    * Insert all ids from the provided map into the temporary table.
483    *
484    * @param map map of longs with the old id as key and the new id as value
485    * @return the time elapsed.
486    */
 
487  0 toggle public long insertIdUpdates(Map<Long, Long> map)
488    {
489  0 long elapsedTime = 0;
490  0 for (Map.Entry<Long, Long> entry : map.entrySet()) {
491  0 elapsedTime += executeSqlIdInsert(entry.getKey(), entry.getValue());
492    }
493  0 return elapsedTime;
494    }
495   
496    /**
497    * Execute an insert in the temporary table.
498    *
499    * @param oldId the old id to use as key
500    * @param newId the new id to be used for replacement
501    * @return the time elapsed
502    */
 
503  0 toggle private long executeSqlIdInsert(long oldId, long newId)
504    {
505  0 long now = System.nanoTime();
506  0 this.session.createSQLQuery(this.insertStatement)
507    .setLong(OLDID, oldId)
508    .setLong(NEWID, newId)
509    .executeUpdate();
510  0 return System.nanoTime() - now;
511    }
512   
513    /**
514    * Execute an update of all ids in the temporary table in the provided table.
515    *
516    * @param name name of the table to update.
517    * @param field name of the column to convert.
518    * @return the time elapsed
519    */
 
520  0 toggle public long executeSqlIdUpdate(String name, String field)
521    {
522  0 StringBuilder sb = new StringBuilder(128);
523  0 if (R40000XWIKI6990DataMigration.this.isMySQL) {
524    // MySQL does not support multiple references to a temporary table in a single statement, but supports
525    // this non-standard SQL syntax with a single reference to the temporary table
526  0 sb.append("UPDATE ").append(name).append(" t, ").append(TEMPTABLE).append(" m")
527    .append(" SET t.").append(field).append('=').append("m.").append(NEWIDCOL)
528    .append(" WHERE t.").append(field).append('=').append("m.").append(OLDIDCOL);
529  0 } else if (R40000XWIKI6990DataMigration.this.isMSSQL) {
530    // MS-SQL does not support aliases on the updated table, but supports inner joins during updates
531  0 sb.append("UPDATE ").append(name)
532    .append(" SET ").append(field).append('=').append("m.").append(NEWIDCOL)
533    .append(" FROM ").append(name).append(" AS [t] INNER JOIN ")
534    .append(TEMPTABLE).append(" AS [m] ON (t.")
535    .append(field).append('=').append("m.").append(OLDIDCOL).append(')');
536    } else {
537  0 sb.append("UPDATE ").append(name)
538    .append(" t SET ").append(field).append('=')
539    .append("(SELECT m.").append(NEWIDCOL).append(" FROM ").append(TEMPTABLE)
540    .append(" m WHERE t.").append(field).append('=').append("m.").append(OLDIDCOL).append(')')
541    .append(" WHERE t.").append(field).append(" IN (SELECT ").append(OLDIDCOL)
542    .append(" FROM ").append(TEMPTABLE).append(')');
543    }
544  0 long now = System.nanoTime();
545  0 this.session.createSQLQuery(sb.toString()).executeUpdate();
546  0 return System.nanoTime() - now;
547    }
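// Editor's illustration (hypothetical table/column pair, not in the file): for
// ("xwikidoc", "XWD_ID") the three branches above generate, respectively:
//     MySQL:  UPDATE xwikidoc t, xwikiidmigration m SET t.XWD_ID=m.XWM_NEWID WHERE t.XWD_ID=m.XWM_OLDID
//     MS-SQL: UPDATE xwikidoc SET XWD_ID=m.XWM_NEWID FROM xwikidoc AS [t] INNER JOIN xwikiidmigration AS [m] ON (t.XWD_ID=m.XWM_OLDID)
//     others: UPDATE xwikidoc t SET XWD_ID=(SELECT m.XWM_NEWID FROM xwikiidmigration m WHERE t.XWD_ID=m.XWM_OLDID)
//             WHERE t.XWD_ID IN (SELECT XWM_OLDID FROM xwikiidmigration)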
548    }
549   
550    /**
551    * A callback interface for processing custom mapped classes.
552    */
 
553    private interface CustomMappingCallback
554    {
555    /**
556    * Callback to process a custom mapped class.
557    *
558    * @param store the hibernate store
559    * @param name the name of the Xclass
560    * @param mapping the custom mapping of the Xclass
561    * @param hasDynamicMapping true if dynamic mapping is activated
562    * @throws com.xpn.xwiki.XWikiException if an error occurs during processing.
563    */
564    void processCustomMapping(XWikiHibernateStore store, String name, String mapping,
565    boolean hasDynamicMapping) throws XWikiException;
566    }
567   
568    /** Statistics ids computer. */
569    private StatsIdComputer statsIdComputer = new StatsIdComputer();
570   
571    /**
572    * Logger.
573    */
574    @Inject
575    private Logger logger;
576   
577    /** LoggerManager used to suspend logging during SQL operations that are expected to fail. */
578    @Inject
579    private LoggerManager loggerManager;
580   
581    /** Resolve document names. */
582    @Inject
583    @Named("current")
584    private DocumentReferenceResolver<String> resolver;
585   
586    /** Serialize references to identifiers. */
587    @Inject
588    @Named("local/uid")
589    private EntityReferenceSerializer<String> serializer;
590   
591    /** Counter for change log rules. */
592    private int logCount;
593   
594    /** True if migrating MySQL. */
595    private boolean isMySQL;
596   
597    /** True if migrating MySQL tables using MyISAM engine. */
598    private boolean isMySQLMyISAM;
599   
600    /** True if migrating Oracle database. */
601    private boolean isOracle;
602   
603    /** True if migrating Microsoft SQL server database. */
604    private boolean isMSSQL;
605   
606    /** Tables in which updates of foreign keys will be cascaded from primary keys by constraints. */
607    private Set<Table> fkTables = new HashSet<Table>();
608   
609    /** Hold the current store configuration. */
610    private Configuration configuration;
611   
612    /** Hold the current database dialect. */
613    private Dialect dialect;
614   
 
615  0 toggle @Override
616    public String getDescription()
617    {
618  0 return "Convert document IDs to use the new improved hash algorithm.";
619    }
620   
 
621  206 toggle @Override
622    public XWikiDBVersion getVersion()
623    {
624  206 return new XWikiDBVersion(40000);
625    }
626   
627    /**
628    * Log progress of the migration procedure at info level.
629    *
630    * @param message the message to log
631    * @param params optional parameters, formatted into the message using String.format
632    */
 
633  0 toggle private void logProgress(String message, Object... params)
634    {
635  0 if (params.length > 0) {
636  0 this.logger.info("[{}] - {}", getName(), String.format(message, params));
637    } else {
638  0 this.logger.info("[{}] - {}", getName(), message);
639    }
640    }
641   
642    /**
643    * Calls callback for each custom mapped XClass defined. If needed, the mapping is added and injected at the end of
644    * the processing into the hibernate session factory.
645    *
646    * @param store the hibernate store
647    * @param callback the callback to be called
648    * @param context the current XWikiContext
649    * @throws XWikiException when an unexpected error occurs
650    */
 
651  0 toggle private void processCustomMappings(final XWikiHibernateStore store, final CustomMappingCallback callback,
652    final XWikiContext context)
653    throws XWikiException
654    {
655  0 if (store.executeRead(context, new HibernateCallback<Boolean>()
656    {
 
657  0 toggle @Override
658    public Boolean doInHibernate(Session session) throws XWikiException
659    {
660  0 boolean hasProcessedMapping = false;
661  0 try {
662  0 boolean hasDynamicMapping = context.getWiki().hasDynamicCustomMappings();
663  0 SAXReader saxReader = new SAXReader();
664  0 @SuppressWarnings("unchecked")
665    List<Object[]> results = session.createQuery(
666    "select doc.fullName, doc.xWikiClassXML from " + XWikiDocument.class.getName()
667    + " as doc where (doc.xWikiClassXML like '<%')").list();
668   
669    // Inspect all defined classes for custom mapped ones...
670  0 for (Object[] result : results) {
671  0 String docName = (String) result[0];
672  0 String classXML = (String) result[1];
673   
674  0 Element el = saxReader.read(new StringReader(classXML)).getRootElement()
675    .element("customMapping");
676   
677  0 String mapping = (el != null) ? el.getText() : "";
678   
679  0 if (StringUtils.isEmpty(mapping) && "XWiki.XWikiPreferences".equals(docName)) {
680  0 mapping = INTERNAL;
681    }
682   
683  0 if (StringUtils.isNotEmpty(mapping)) {
684  0 hasProcessedMapping |= (!INTERNAL.equals(mapping) && hasDynamicMapping
685    && store.injectCustomMapping(docName, mapping, context));
686  0 callback.processCustomMapping(store, docName, mapping, hasDynamicMapping);
687    }
688    }
689    } catch (Exception e) {
690  0 throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
691    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
692    }
693  0 return hasProcessedMapping;
694    }
695    })) {
696  0 store.injectUpdatedCustomMappings(context);
697    }
698    }
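// Editor's note (illustrative, not in the file): the resolved HQL executed above is
//     select doc.fullName, doc.xWikiClassXML from com.xpn.xwiki.doc.XWikiDocument as doc
//         where (doc.xWikiClassXML like '<%')
// and a class is handed to the callback when its XML exposes a non-empty <customMapping>
// element, containing either the literal "internal" or a Hibernate mapping fragment.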
699   
700    /**
701    * Generic procedure to convert identifiers with some protection against conflicting ids.
702    *
703    * @param map the conversion map
704    * @param callback the callback implementing the hibernate actions
705    * @throws XWikiException if an error occurs during conversion
706    */
 
707  0 toggle private void convertDbId(final Map<Long, Long> map, IdConversionHibernateCallback callback) throws XWikiException
708    {
709  0 int count = map.size() + 1;
710  0 while (!map.isEmpty() && count > map.size()) {
711  0 count = map.size();
712  0 for (Iterator<Map.Entry<Long, Long>> it = map.entrySet().iterator(); it.hasNext();) {
713  0 Map.Entry<Long, Long> entry = it.next();
714   
715  0 if (!map.containsKey(entry.getValue())) {
716  0 callback.setOldId(entry.getKey());
717  0 callback.setNewId(entry.getValue());
718   
719  0 try {
720  0 getStore().executeWrite(getXWikiContext(), callback);
721    } catch (Exception e) {
722  0 throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
723    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName()
724    + " migration failed while converting ID from [" + entry.getKey()
725    + "] to [" + entry.getValue() + "]", e);
726    }
727  0 it.remove();
728    }
729    }
730    }
731   
732  0 if (!map.isEmpty()) {
733  0 throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
734    XWikiException.ERROR_XWIKI_STORE_MIGRATION,
735    getName() + " migration failed. Unresolved circular reference during id migration.");
736    }
737    }
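// Editor's worked example (not part of the file): given the conversion map {1 -> 2, 2 -> 7},
// the first pass only applies 2 -> 7 (2 is still a key of the map), the next pass applies
// 1 -> 2 and the map empties; a genuine cycle such as {1 -> 2, 2 -> 1} never shrinks the map,
// so the loop stops and the "unresolved circular reference" exception above is thrown.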
738   
739    /**
740    * Retrieve the list of table that store collections of the provided persisted class, and that need to be manually
741    * updated, since no cascaded update has been added for them.
742    *
743    * @param pClass the persisted class to analyse
744    * @return a list of string pairs, where the first element is the table name and the second is the key in that table.
745    */
 
746  0 toggle private List<String[]> getCollectionProperties(PersistentClass pClass)
747    {
748  0 List<String[]> list = new ArrayList<String[]>();
749   
750  0 if (pClass != null) {
751  0 for (org.hibernate.mapping.Collection coll : getCollection(pClass)) {
752  0 Table collTable = coll.getCollectionTable();
753  0 if (!this.fkTables.contains(collTable)) {
754  0 list.add(new String[] { collTable.getName(), getKeyColumnName(coll) });
755    }
756    }
757    }
758   
759  0 return list;
760    }
761   
762    /**
763    * Retrieve the list of tables that store collections of the provided persisted class.
764    *
765    * @param pClass the persisted class to analyse
766    * @param all if false, return only collections that need manual updates,
767    * see {@link #getCollectionProperties(PersistentClass pClass)}
768    * @return a list of string pairs, where the first element is the table name and the second is the key in that table.
769    */
 
770  0 toggle private List<String[]> getCollectionProperties(PersistentClass pClass, boolean all)
771    {
772  0 List<String[]> list = new ArrayList<String[]>();
773   
774  0 if (pClass != null) {
775  0 for (org.hibernate.mapping.Collection coll : getCollection(pClass)) {
776  0 Table collTable = coll.getCollectionTable();
777  0 if (all || !this.fkTables.contains(collTable)) {
778  0 list.add(new String[] { collTable.getName(), getKeyColumnName(coll) });
779    }
780    }
781    }
782   
783  0 return list;
784    }
785   
786    /**
787    * Retrieve the list of collection properties of the provided persisted class.
788    *
789    * @param pClass the persisted class to analyse
790    * @return a list of hibernate collections
791    */
 
792  0 toggle private List<org.hibernate.mapping.Collection> getCollection(PersistentClass pClass)
793    {
794  0 List<org.hibernate.mapping.Collection> list = new ArrayList<org.hibernate.mapping.Collection>();
795   
796  0 if (pClass != null) {
797  0 @SuppressWarnings("unchecked")
798    Iterator<Property> it = pClass.getPropertyIterator();
799  0 while (it.hasNext()) {
800  0 Property property = it.next();
801  0 if (property.getType().isCollectionType()) {
802  0 list.add((org.hibernate.mapping.Collection) property.getValue());
803    }
804    }
805    }
806   
807  0 return list;
808    }
809   
810    /**
811    * get all tables to process, including collections if needed.
812    *
813    * @param className the persistent class
814    * @return a list of pairs of table name and key field name.
815    * @throws DataMigrationException on failure
816    */
 
817  0 toggle private List<String[]> getAllTableToProcess(String className) throws DataMigrationException
818    {
819  0 return getAllTableToProcess(className, null);
820    }
821   
822    /**
823    * get hibernate mapping of the given class or entity name.
824    *
825    * @param className the class or entity name
826    * @return a list of pairs of table name and the property field name.
827    * @throws DataMigrationException if mapping cannot be found
828    */
 
829  0 toggle private PersistentClass getClassMapping(String className) throws DataMigrationException
830    {
831  0 PersistentClass pClass = this.configuration.getClassMapping(className);
832   
833  0 if (pClass == null) {
834  0 throw new DataMigrationException(
835    String.format("Could not migrate IDs for class [%s] : no hibernate mapping found. "
836    + "For example, this error commonly happens if you have copied a document defining an internally "
837    + "mapped class (like XWiki.XWikiPreferences) and never used the newly created class OR if you "
838    + "have forgotten to customize the hibernate mapping while using your own internally custom mapped "
839    + "class. In the first and most common case, to fix this issue and migrate your wiki, you should "
840    + "delete the offending and useless class definition or the whole document defining that class "
841    + "from your original wiki before the migration.",
842    className));
843    }
844   
845  0 return pClass;
846    }
847   
848    /**
849    * get all tables to process, including collections if needed.
850    *
851    * @param className the class or entity name
852    * @param propertyName the name of the property for which the column name is returned
853    * @return a list of pairs of table name and the property field name.
854    * @throws DataMigrationException on failure
855    */
 
856  0 toggle private List<String[]> getAllTableToProcess(String className, String propertyName) throws DataMigrationException
857    {
858  0 return getAllTableToProcess(getClassMapping(className), propertyName);
859    }
860   
861    /**
862    * get all tables to process, including collections if needed.
863    *
864    * @param pClass the persistent class
865    * @param propertyName the name of the property for which the column name is returned
866    * @return a list of pairs of table name and the property field name.
867    */
 
868  0 toggle private List<String[]> getAllTableToProcess(PersistentClass pClass, String propertyName)
869    {
870  0 List<String[]> list = new ArrayList<String[]>();
871   
872    // Add collection table that will not be updated by cascaded updates
873  0 list.addAll(getCollectionProperties(pClass));
874   
875    // Skip classes that will be updated by cascaded updates
876  0 if (!this.fkTables.contains(pClass.getTable())) {
877  0 list.add(new String[] { pClass.getTable().getName(), getColumnName(pClass, propertyName) });
878    }
879  0 return list;
880    }
881   
882    /**
883    * get name of the first column of the key of a given collection property.
884    *
885    * @param coll the collection property
886    * @return the column name of the key
887    */
 
888  0 toggle private String getKeyColumnName(org.hibernate.mapping.Collection coll)
889    {
890  0 return ((Column) coll.getKey().getColumnIterator().next()).getName();
891    }
892   
893    /**
894    * get name of the first column of the key of a given pClass.
895    *
896    * @param pClass the persistent class
897    * @return the column name of the key
898    */
 
899  0 toggle private String getKeyColumnName(PersistentClass pClass)
900    {
901  0 return getColumnName(pClass, null);
902    }
903   
904    /**
905    * get column name (first one) of a property of the given pClass.
906    *
907    * @param pClass the persistent class
908    * @param propertyName the name of the property, or null to return the first column of the key
909    * @return the column name of the property
910    */
 
911  0 toggle private String getColumnName(PersistentClass pClass, String propertyName)
912    {
913  0 if (propertyName != null) {
914  0 return ((Column) pClass.getProperty(propertyName).getColumnIterator().next()).getName();
915    }
916  0 return ((Column) pClass.getKey().getColumnIterator().next()).getName();
917    }
918   
 
919  0 toggle @Override
920    public void hibernateMigrate() throws DataMigrationException, XWikiException
921    {
922  0 final Map<Long, Long> docs = new HashMap<Long, Long>();
923  0 final List<String> customMappedClasses = new ArrayList<String>();
924  0 final Map<Long, Long> objs = new HashMap<Long, Long>();
925  0 final Queue<Map<Long, Long>> stats = new LinkedList<Map<Long, Long>>();
926   
927    // Get ids conversion list
928  0 getStore().executeRead(getXWikiContext(), new HibernateCallback<Object>()
929    {
 
930  0 toggle private void fillDocumentIdConversion(Session session, Map<Long, Long> map)
931    {
932  0 String database = getXWikiContext().getWikiId();
933  0 @SuppressWarnings("unchecked")
934    List<Object[]> results = session.createQuery(
935    "select doc.id, doc.space, doc.name, doc.defaultLanguage, doc.language from "
936    + XWikiDocument.class.getName() + " as doc").list();
937   
938  0 for (Object[] result : results) {
939  0 long oldId = (Long) result[0];
940  0 String space = (String) result[1];
941  0 String name = (String) result[2];
942  0 String defaultLanguage = (String) result[3];
943  0 String language = (String) result[4];
944   
945    // Use a real document, since we need the language to be appended.
946    // TODO: Change this when the locale is integrated
947  0 XWikiDocument doc = new XWikiDocument(new DocumentReference(database, space, name));
948  0 doc.setDefaultLanguage(defaultLanguage);
949  0 doc.setLanguage(language);
950  0 long newId = doc.getId();
951   
952  0 if (oldId != newId) {
953  0 map.put(oldId, newId);
954    }
955    }
956   
957  0 logProgress("Retrieved %d document IDs to be converted.", map.size());
958    }
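// Editor's illustration (hypothetical page, not in the file): the replacement id is simply
// whatever the new hash-based XWikiDocument.getId() returns for the same reference, e.g.
//     XWikiDocument doc = new XWikiDocument(new DocumentReference("xwiki", "Main", "WebHome"));
//     doc.setDefaultLanguage("en");
//     doc.setLanguage("");
//     long newId = doc.getId();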
959   
 
960  0 toggle private void fillObjectIdConversion(Session session, Map<Long, Long> map)
961    {
962  0 @SuppressWarnings("unchecked")
963    List<Object[]> results = session.createQuery(
964    "select obj.id, obj.name, obj.className, obj.number from " + BaseObject.class.getName()
965    + " as obj").list();
966  0 for (Object[] result : results) {
967  0 long oldId = (Long) result[0];
968  0 String docName = (String) result[1];
969  0 String className = (String) result[2];
970  0 Integer number = (Integer) result[3];
971   
972  0 BaseObjectReference objRef = new BaseObjectReference(
973    R40000XWIKI6990DataMigration.this.resolver.resolve(className), number,
974    R40000XWIKI6990DataMigration.this.resolver.resolve(docName));
975  0 long newId = Util.getHash(R40000XWIKI6990DataMigration.this.serializer.serialize(objRef));
976   
977  0 if (oldId != newId) {
978  0 map.put(oldId, newId);
979    }
980    }
981   
982  0 logProgress("Retrieved %d object IDs to be converted.", map.size());
983    }
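// Editor's illustration (hypothetical object, not in the file): new object ids are the hash
// of the "local/uid" serialization of the object reference, e.g. for the first object of
// class XWiki.XWikiUsers attached to document XWiki.Admin:
//     BaseObjectReference objRef = new BaseObjectReference(
//         resolver.resolve("XWiki.XWikiUsers"), 0, resolver.resolve("XWiki.Admin"));
//     long newId = Util.getHash(serializer.serialize(objRef));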
984   
 
985  0 toggle private void fillCustomMappingMap(XWikiHibernateStore store, XWikiContext context)
986    throws XWikiException
987    {
988  0 processCustomMappings(store, new CustomMappingCallback()
989    {
 
990  0 toggle @Override
991    public void processCustomMapping(XWikiHibernateStore store, String name, String mapping,
992    boolean hasDynamicMapping) throws XWikiException
993    {
994  0 if (INTERNAL.equals(mapping) || hasDynamicMapping) {
995  0 customMappedClasses.add(name);
996    }
997    }
998    }, context);
999   
1000  0 logProgress("Retrieved %d custom mapped classes to be processed.", customMappedClasses.size());
1001    }
1002   
 
1003  0 toggle private void fillStatsConversionMap(Session session, Class<?> klass, Map<Long, Long> map)
1004    {
1005  0 @SuppressWarnings("unchecked")
1006    List<Object[]> results = session.createQuery(
1007    "select stats.id, stats.name, stats.number from " + klass.getName() + " as stats").list();
1008  0 for (Object[] result : results) {
1009  0 long oldId = (Long) result[0];
1010  0 String statsName = (String) result[1];
1011  0 Integer number = (Integer) result[2];
1012   
1013    // Do not try to convert broken records which would cause duplicated ids
1014  0 if (statsName != null && !statsName.startsWith(".") && !statsName.endsWith(".")) {
1015  0 long newId = R40000XWIKI6990DataMigration.this.statsIdComputer.getId(statsName, number);
1016   
1017  0 if (oldId != newId) {
1018  0 map.put(oldId, newId);
1019    }
1020    } else {
1021  0 R40000XWIKI6990DataMigration.this.logger.debug(
1022    "Skipping invalid statistical entry [{}] with name [{}]", oldId, statsName);
1023    }
1024    }
1025   
1026  0 String klassName = klass.getName().substring(klass.getName().lastIndexOf('.') + 1);
1027  0 logProgress("Retrieved %d %s statistics IDs to be converted.", map.size(),
1028    klassName.substring(0, klassName.length() - 5).toLowerCase());
1029    }
1030   
 
1031  0 toggle @Override
1032    public Object doInHibernate(Session session) throws XWikiException
1033    {
1034  0 try {
1035  0 fillDocumentIdConversion(session, docs);
1036   
1037  0 fillObjectIdConversion(session, objs);
1038   
1039    // Retrieve custom mapped classes
1040  0 if (getStore() instanceof XWikiHibernateStore) {
1041  0 fillCustomMappingMap((XWikiHibernateStore) getStore(), getXWikiContext());
1042    }
1043   
1044    // Retrieve statistics ID conversion
1045  0 for (Class<?> statsClass : STATS_CLASSES) {
1046  0 Map<Long, Long> map = new HashMap<Long, Long>();
1047  0 fillStatsConversionMap(session, statsClass, map);
1048  0 stats.add(map);
1049    }
1050   
1051  0 session.clear();
1052    } catch (Exception e) {
1053  0 throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
1054    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName() + " migration failed", e);
1055    }
1056  0 return null;
1057    }
1058    });
1059   
1060    // Cache the configuration and the dialect
1061  0 this.configuration = getStore().getConfiguration();
1062  0 this.dialect = this.configuration.buildSettings().getDialect();
1063   
1064    // Check configuration for safe mode
1065    /* True if migration should use safe but slower non-bulk native updates. */
1066  0 boolean useSafeUpdates =
1067    "1".equals(getXWikiContext().getWiki().Param("xwiki.store.migration." + this.getName() + ".safemode", "0"));
1068   
1069    // Use safe mode if the database has no temporary table supported by hibernate
1070  0 useSafeUpdates = useSafeUpdates || !this.configuration.buildSettings().getDialect().supportsTemporaryTables();
1071   
1072    // Proceed to document id conversion
1073  0 if (!docs.isEmpty()) {
1074  0 if (!useSafeUpdates) {
1075    // Pair table,key for table that need manual updates
1076  0 final List<String[]> tableToProcess = new ArrayList<String[]>();
1077   
1078  0 for (Class<?> docClass : DOC_CLASSES) {
1079  0 tableToProcess.addAll(getAllTableToProcess(docClass.getName()));
1080    }
1081  0 for (Class<?> docClass : DOCLINK_CLASSES) {
1082  0 tableToProcess.addAll(getAllTableToProcess(docClass.getName(), "docId"));
1083    }
1084   
1085  0 logProgress("Converting %d document IDs in %d tables...", docs.size(), tableToProcess.size());
1086   
1087  0 final long[] times = new long[tableToProcess.size() + 1];
1088  0 try {
1089  0 getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback()
1090    {
 
1091  0 toggle @Override
1092    public void doBulkIdUpdate()
1093    {
1094  0 times[this.timer++] += insertIdUpdates(docs);
1095   
1096  0 for (String[] table : tableToProcess) {
1097  0 times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
1098    }
1099    }
1100    });
1101    } catch (Exception e) {
1102  0 throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
1103    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName()
1104    + " migration failed", e);
1105    }
1106  0 if (this.logger.isDebugEnabled()) {
1107  0 int timer = 0;
1108  0 this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
1109   
1110  0 for (String[] table : tableToProcess) {
1111  0 this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
1112    }
1113    }
1114    } else {
1115  0 final List<String[]> docsColl = new ArrayList<String[]>();
1116  0 for (Class<?> docClass : DOC_CLASSES) {
1117  0 docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
1118    }
1119  0 for (Class<?> docClass : DOCLINK_CLASSES) {
1120  0 docsColl.addAll(getCollectionProperties(getClassMapping(docClass.getName())));
1121    }
1122   
1123  0 logProgress("Converting %d document IDs in %d tables and %d collection tables...",
1124    docs.size(), DOC_CLASSES.length + DOCLINK_CLASSES.length, docsColl.size());
1125   
1126  0 final long[] times = new long[DOC_CLASSES.length + DOCLINK_CLASSES.length + docsColl.size()];
1127  0 convertDbId(docs, new AbstractIdConversionHibernateCallback()
1128    {
 
1129  0 toggle @Override
1130    public void doSingleUpdate()
1131    {
1132  0 for (String[] coll : docsColl) {
1133  0 times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
1134    }
1135   
1136  0 for (Class<?> doclinkClass : DOCLINK_CLASSES) {
1137  0 times[this.timer++] += executeIdUpdate(doclinkClass, DOCID);
1138    }
1139  0 times[this.timer++] += executeIdUpdate(XWikiLink.class, DOCID);
1140  0 times[this.timer++] += executeIdUpdate(XWikiRCSNodeInfo.class, ID + '.' + DOCID);
1141  0 times[this.timer++] += executeIdUpdate(XWikiDocument.class, ID);
1142    }
1143    });
1144  0 if (this.logger.isDebugEnabled()) {
1145  0 int timer = 0;
1146  0 for (String[] coll : docsColl) {
1147  0 this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
1148    }
1149  0 for (Class<?> doclinkClass : DOCLINK_CLASSES) {
1150  0 this.logger.debug("Time elapsed for {} class: {} ms", doclinkClass.getName(),
1151    times[timer++] / 1000000);
1152    }
1153  0 this.logger.debug("Time elapsed for {} class: {} ms", XWikiRCSNodeInfo.class.getName(),
1154    times[timer++] / 1000000);
1155  0 this.logger.debug("Time elapsed for {} class: {} ms", XWikiDocument.class.getName(),
1156    times[timer++] / 1000000);
1157    }
1158    }
1159  0 logProgress("All document IDs have been converted successfully.");
1160    } else {
1161  0 logProgress("No document IDs to convert, skipping.");
1162    }
1163   
1164    // Proceed to object id conversion
1165  0 if (!objs.isEmpty()) {
1166  0 if (!useSafeUpdates) {
1167    // Pair table,key for table that need manual updates
1168  0 final List<String[]> tableToProcess = new ArrayList<String[]>();
1169   
1170  0 PersistentClass objklass = getClassMapping(BaseObject.class.getName());
1171  0 tableToProcess.addAll(getCollectionProperties(objklass));
1172   
1173  0 for (Class<?> propertyClass : PROPERTY_CLASS) {
1174  0 tableToProcess.addAll(getAllTableToProcess(propertyClass.getName()));
1175    }
1176  0 for (String customClass : customMappedClasses) {
1177  0 tableToProcess.addAll(getAllTableToProcess(customClass));
1178    }
1179  0 tableToProcess.add(new String[] { objklass.getTable().getName(), getKeyColumnName(objklass) });
1180   
1181  0 logProgress("Converting %d object IDs in %d tables...", objs.size(), tableToProcess.size());
1182   
1183  0 final long[] times = new long[tableToProcess.size() + 1];
1184  0 try {
1185  0 getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback()
1186    {
 
1187  0 toggle @Override
1188    public void doBulkIdUpdate()
1189    {
1190  0 times[this.timer++] += insertIdUpdates(objs);
1191   
1192  0 for (String[] table : tableToProcess) {
1193  0 times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
1194    }
1195    }
1196    });
1197    } catch (Exception e) {
1198  0 throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
1199    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName()
1200    + " migration failed", e);
1201    }
1202  0 if (this.logger.isDebugEnabled()) {
1203  0 int timer = 0;
1204  0 this.logger.debug("Time elapsed for inserts: {} ms", times[timer++] / 1000000);
1205   
1206  0 for (String[] table : tableToProcess) {
1207  0 this.logger.debug("Time elapsed for {} table: {} ms", table[0], times[timer++] / 1000000);
1208    }
1209    }
1210    } else {
1211    // Name of classes that need manual updates
1212  0 final List<String> classToProcess = new ArrayList<String>();
1213    // Name of custom classes that need manual updates
1214  0 final List<String> customClassToProcess = new ArrayList<String>();
1215    // Pair table,key for collection table that need manual updates
1216  0 final List<String[]> objsColl = new ArrayList<String[]>();
1217   
1218  0 objsColl.addAll(getCollectionProperties(getClassMapping(BaseObject.class.getName())));
1219  0 for (Class<?> propertyClass : PROPERTY_CLASS) {
1220  0 String className = propertyClass.getName();
1221  0 PersistentClass klass = getClassMapping(className);
1222   
1223    // Add collection table that will not be updated by cascaded updates
1224  0 objsColl.addAll(getCollectionProperties(klass));
1225   
1226    // Skip classes that will be updated by cascaded updates
1227  0 if (!this.fkTables.contains(klass.getTable())) {
1228  0 classToProcess.add(className);
1229    }
1230    }
1231  0 for (String customClass : customMappedClasses) {
1232  0 PersistentClass klass = getClassMapping(customClass);
1233   
1234    // Add collection table that will not be updated by cascaded updates
1235  0 objsColl.addAll(getCollectionProperties(klass));
1236   
1237    // Skip classes that will be updated by cascaded updates
1238  0 if (!this.fkTables.contains(klass.getTable())) {
1239  0 customClassToProcess.add(customClass);
1240    }
1241    }
1242   
1243  0 logProgress(
1244    "Converting %d object IDs in %d tables, %d custom mapped tables and %d collection tables...",
1245    objs.size(), classToProcess.size() + 1, customClassToProcess.size(), objsColl.size());
1246   
1247  0 final long[] times =
1248    new long[classToProcess.size() + 1 + customClassToProcess.size() + objsColl.size()];
1249  0 convertDbId(objs, new AbstractIdConversionHibernateCallback()
1250    {
 
1251  0 toggle @Override
1252    public void doSingleUpdate()
1253    {
1254  0 for (String[] coll : objsColl) {
1255  0 times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
1256    }
1257   
1258  0 for (String customMappedClass : customClassToProcess) {
1259  0 times[this.timer++] += executeIdUpdate(customMappedClass, ID);
1260    }
1261   
1262  0 for (String propertyClass : classToProcess) {
1263  0 times[this.timer++] += executeIdUpdate(propertyClass, IDID);
1264    }
1265   
1266  0 times[this.timer++] += executeIdUpdate(BaseObject.class, ID);
1267    }
1268    });
1269  0 if (this.logger.isDebugEnabled()) {
1270  0 int timer = 0;
1271  0 for (String[] coll : objsColl) {
1272  0 this.logger.debug("Time elapsed for {} collection: {} ms", coll[0], times[timer++] / 1000000);
1273    }
1274  0 for (String customMappedClass : customClassToProcess) {
1275  0 this.logger.debug("Time elapsed for {} custom table: {} ms", customMappedClass,
1276    times[timer++] / 1000000);
1277    }
1278  0 for (String propertyClass : classToProcess) {
1279  0 this.logger.debug("Time elapsed for {} property table: {} ms", propertyClass,
1280    times[timer++] / 1000000);
1281    }
1282  0 this.logger.debug("Time elapsed for {} class: {} ms", BaseObject.class.getName(),
1283    times[timer++] / 1000000);
1284    }
1285    }
1286  0 logProgress("All object IDs have been converted successfully.");
1287    } else {
1288  0 logProgress("No object IDs to convert, skipping.");
1289    }
1290   
1291    // Proceed to statistics id conversions
1292  0 for (final Class<?> statsClass : STATS_CLASSES) {
1293   
1294  0 Map<Long, Long> map = stats.poll();
1295  0 String klassName = statsClass.getName().substring(statsClass.getName().lastIndexOf('.') + 1);
1296  0 klassName = klassName.substring(0, klassName.length() - 5).toLowerCase();
1297   
1298  0 if (!map.isEmpty()) {
1299  0 if (!useSafeUpdates) {
1300  0 final List<String[]> tableToProcess = new ArrayList<String[]>();
1301  0 final Map<Long, Long> statids = map;
1302   
1303  0 PersistentClass statklass = getClassMapping(statsClass.getName());
1304  0 tableToProcess.addAll(getCollectionProperties(statklass));
1305  0 tableToProcess.add(new String[] { statklass.getTable().getName(), getKeyColumnName(statklass) });
1306   
1307  0 logProgress("Converting %d %s statistics IDs in %d tables...",
1308    map.size(), klassName, tableToProcess.size());
1309   
1310  0 final long[] times = new long[tableToProcess.size() + 1];
1311  0 try {
1312  0 getStore().executeWrite(getXWikiContext(), new AbstractBulkIdConversionHibernateCallback()
1313    {
 
1314  0 toggle @Override
1315    public void doBulkIdUpdate()
1316    {
1317  0 times[this.timer++] += insertIdUpdates(statids);
1318   
1319  0 for (String[] table : tableToProcess) {
1320  0 times[this.timer++] += executeSqlIdUpdate(table[0], table[1]);
1321    }
1322    }
1323    });
1324    } catch (Exception e) {
1325  0 throw new XWikiException(XWikiException.MODULE_XWIKI_STORE,
1326    XWikiException.ERROR_XWIKI_STORE_MIGRATION, getName()
1327    + " migration failed", e);
1328    }
1329  0 if (this.logger.isDebugEnabled()) {
1330  0 int timer = 0;
1331  0 this.logger.debug("Time elapsed for inserts: {} ms",
1332    times[timer++] / 1000000);
1333   
1334  0 for (String[] table : tableToProcess) {
1335  0 this.logger.debug("Time elapsed for {} table: {} ms", table[0],
1336    times[timer++] / 1000000);
1337    }
1338    }
1339    } else {
1340  0 final List<String[]> statsColl = new ArrayList<String[]>();
1341  0 statsColl.addAll(getCollectionProperties(getClassMapping(statsClass.getName())));
1342   
1343  0 logProgress("Converting %d %s statistics IDs in 1 tables and %d collection tables...",
1344    map.size(), klassName, statsColl.size());
1345   
1346  0 final long[] times = new long[statsColl.size() + 1];
1347  0 convertDbId(map, new AbstractIdConversionHibernateCallback()
1348    {
 
1349  0 toggle @Override
1350    public void doSingleUpdate()
1351    {
1352  0 for (String[] coll : statsColl) {
1353  0 times[this.timer++] += executeSqlIdUpdate(coll[0], coll[1]);
1354    }
1355  0 times[this.timer++] += executeIdUpdate(statsClass, ID);
1356    }
1357    });
1358  0 if (this.logger.isDebugEnabled()) {
1359  0 int timer = 0;
1360  0 for (String[] coll : statsColl) {
1361  0 this.logger.debug("Time elapsed for {} collection: {} ms", coll[0],
1362    times[timer++] / 1000000);
1363    }
1364  0 this.logger.debug("Time elapsed for {} class: {} ms", statsClass.getName(),
1365    times[timer++] / 1000000);
1366    }
1367    }
1368  0 logProgress("All %s statistics IDs have been converted successfully.", klassName);
1369    } else {
1370  0 logProgress("No %s statistics IDs to convert, skipping.", klassName);
1371    }
1372    }
1373    }
1374   
1375    /**
1376    * Append a drop primary key constraint command for the given table.
1377    *
1378    * @param sb append the result into this string builder
1379    * @param table the table
1380    */
 
1381  0 toggle private void appendDropPrimaryKey(StringBuilder sb, Table table)
1382    {
1383  0 final String tableName = table.getName();
1384  0 String pkName = table.getPrimaryKey().getName();
1385   
1386    // MS-SQL requires a constraint name, and the one provided by the mapping is not necessarily appropriate
1387    // since, during database creation, that name has not been used and a name has been assigned by the
1388    // database itself. We need to retrieve that name from the schema.
1389  0 if (this.isMSSQL) {
1390  0 try {
1391  0 pkName = getStore().failSafeExecuteRead(getXWikiContext(), new HibernateCallback<String>()
1392    {
 
1393  0 toggle @Override
1394    public String doInHibernate(Session session) throws HibernateException
1395    {
1396    // Retrieve the constraint name from the database
1397  0 return (String) session.createSQLQuery(
1398    "SELECT CONSTRAINT_NAME FROM INFORMATION_SCHEMA.TABLE_CONSTRAINTS"
1399    + " WHERE TABLE_NAME = :tableName AND CONSTRAINT_TYPE = 'PRIMARY KEY'")
1400    .setString("tableName", tableName)
1401    .uniqueResult();
1402    }
1403    });
1404    } catch (Exception e) {
1405    // ignored since it is really unlikely to happen
1406  0 this.logger.debug("Fail retrieving the primary key constraints name", e);
1407    }
1408    }
1409   
1410  0 sb.append(" <dropPrimaryKey tableName=\"").append(tableName);
1411   
1412  0 if (pkName != null) {
1413  0 sb.append("\" constraintName=\"").append(pkName);
1414    }
1415   
1416  0 sb.append("\"/>\n");
1417    }
1418   
1419    /**
1420    * Append an add primary key constraint command for the given table.
1421    *
1422    * @param sb append the result into this string builder
1423    * @param table the table name
1424    */
 
1425  0 toggle private void appendAddPrimaryKey(StringBuilder sb, Table table)
1426    {
1427  0 PrimaryKey pk = table.getPrimaryKey();
1428  0 String pkName = pk.getName();
1429   
1430  0 sb.append(" <addPrimaryKey tableName=\"").append(table.getName())
1431    .append("\" columnNames=\"");
1432   
1433  0 @SuppressWarnings("unchecked")
1434    Iterator<Column> columns = pk.getColumnIterator();
1435  0 while (columns.hasNext()) {
1436  0 Column column = columns.next();
1437  0 sb.append(column.getName());
1438  0 if (columns.hasNext()) {
1439  0 sb.append(",");
1440    }
1441    }
1442   
1443  0 if (pkName != null) {
1444  0 sb.append("\" constraintName=\"").append(pkName);
1445    }
1446   
1447  0 sb.append("\"/>\n");
1448    }
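// Illustrative sketch of the fragment built above, for the same hypothetical table with a single key column
// "XWD_ID":
//
//   <addPrimaryKey tableName="xwikidoc" columnNames="XWD_ID" constraintName="PK_XWIKIDOC"/>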
1449   
1450    /**
1451    * Append a drop index command for the given index.
1452    *
1453    * @param sb append the result into this string builder
1454    * @param index the index
1455    */
1456  0 private void appendDropIndex(StringBuilder sb, Index index)
1457    {
1458  0 sb.append(" <dropIndex indexName=\"").append(index.getName())
1459    .append("\" tableName=\"").append(index.getTable().getName())
1460    .append("\"/>\n");
1461    }
1462   
1463    /**
1464    * Append an add index command for the given index.
1465    *
1466    * @param sb append the result into this string builder
1467    * @param index the index
1468    */
1469  0 private void appendAddIndex(StringBuilder sb, Index index)
1470    {
1471  0 sb.append(" <createIndex tableName=\"").append(index.getTable().getName())
1472    .append("\" indexName=\"").append(index.getName()).append("\">\n");
1473   
1474  0 @SuppressWarnings("unchecked")
1475    Iterator<Column> columns = index.getColumnIterator();
1476  0 while (columns.hasNext()) {
1477  0 Column column = columns.next();
1478  0 sb.append(" <column name=\"").append(column.getName()).append("\"/>\n");
1479    }
1480   
1481  0 sb.append("</createIndex>\n");
1482    }
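// Illustrative sketch of the fragments built by the two index helpers above, for a hypothetical index
// "IDX_DOC_FULLNAME" on column "XWD_FULLNAME" of table "xwikidoc":
//
//   <dropIndex indexName="IDX_DOC_FULLNAME" tableName="xwikidoc"/>
//
//   <createIndex tableName="xwikidoc" indexName="IDX_DOC_FULLNAME">
//     <column name="XWD_FULLNAME"/>
//   </createIndex>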
1483   
1484    /**
1485    * Append a modify data type to BIGINT command for the given column and table.
1486    *
1487    * @param sb append the result into this string builder
1488    * @param table the table name
1489    * @param column the column name
1490    */
1491  0 private void appendModifyColumn(StringBuilder sb, String table, String column)
1492    {
1493  0 sb.append(" <modifyDataType tableName=\"").append(table)
1494    .append("\" columnName=\"").append(column)
1495    .append("\" newDataType=\"BIGINT\"/>\n");
1496   
1497    // MS-SQL drops the NOT NULL constraint while modifying the data type, so we add it back
1498  0 if (this.isMSSQL) {
1499  0 sb.append(" <addNotNullConstraint tableName=\"").append(table)
1500    .append("\" columnName=\"").append(column)
1501    .append("\" columnDataType=\"BIGINT\"/>\n");
1502    }
1503    }
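// Illustrative sketch of the fragment built above for a hypothetical column "XWD_ID" of table "xwikidoc";
// the <addNotNullConstraint> line is only emitted on MS-SQL:
//
//   <modifyDataType tableName="xwikidoc" columnName="XWD_ID" newDataType="BIGINT"/>
//   <addNotNullConstraint tableName="xwikidoc" columnName="XWD_ID" columnDataType="BIGINT"/>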
1504   
1505    /**
1506    * Create a Liquibase change log to modify the column type to BIGINT. If the database is MS-SQL, the primary key
1507    * constraint and related indexes are dropped before the change and added back afterwards.
1508    *
1509    * @param sb append the result into this string builder
1510    * @param table the table
1511    * @param column the column name
1512    */
1513  0 private void appendDataTypeChangeLog(StringBuilder sb, Table table, String column)
1514    {
1515  0 String tableName = table.getName();
1516   
1517  0 sb.append(" <changeSet id=\"R").append(this.getVersion().getVersion())
1518    .append('-').append(Util.getHash(String.format("modifyDataType-%s-%s", table, column)))
1519    .append("\" author=\"xwiki\">\n")
1520    .append(" <comment>Upgrade identifier [").append(column).append("] from table [").append(tableName)
1521    .append("] to BIGINT type</comment >\n");
1522   
1523    // MS-SQL requires that the primary key constraint and all indexes related to the changed column be dropped
1524    // before changing the column type.
1525  0 if (this.isMSSQL) {
1526  0 if (table.hasPrimaryKey()) {
1527  0 appendDropPrimaryKey(sb, table);
1528    }
1529   
1530    // We drop all indexes related to the table; this is overkill, but it does not hurt
1531  0 for (@SuppressWarnings("unchecked") Iterator<Index> it = table.getIndexIterator(); it.hasNext();) {
1532  0 Index index = it.next();
1533  0 appendDropIndex(sb, index);
1534    }
1535    }
1536   
1537  0 appendModifyColumn(sb, tableName, column);
1538   
1539    // Add back dropped PK constraints and indexes for MS-SQL
1540  0 if (this.isMSSQL) {
1541  0 if (table.hasPrimaryKey()) {
1542  0 appendAddPrimaryKey(sb, table);
1543    }
1544   
1545  0 for (@SuppressWarnings("unchecked") Iterator<Index> it = table.getIndexIterator(); it.hasNext();) {
1546  0 Index index = it.next();
1547  0 appendAddIndex(sb, index);
1548    }
1549    }
1550   
1551  0 sb.append(" </changeSet>\n");
1552  0 this.logCount++;
1553    }
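// Putting the helpers together, the change set appended for one identifier column looks roughly like the sketch
// below (hypothetical names; the change set id combines the migration version with a hash, and the primary key
// and index drop/re-create steps are only emitted on MS-SQL):
//
//   <changeSet id="R40000-..." author="xwiki">
//     <comment>Upgrade identifier [XWD_ID] from table [xwikidoc] to BIGINT type</comment>
//     <dropPrimaryKey .../> <dropIndex .../>
//     <modifyDataType tableName="xwikidoc" columnName="XWD_ID" newDataType="BIGINT"/>
//     <addPrimaryKey .../> <createIndex .../>
//   </changeSet>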
1554   
1555    /**
1556    * Append change logs to fix the identifier type of a given persistent class. Collection tables storing properties
1557    * of this persistent class will also be updated.
1558    *
1559    * @param sb the string builder to append to
1560    * @param pClass the persistent class to process
1561    */
1562  0 private void appendDataTypeChangeLogs(StringBuilder sb, PersistentClass pClass)
1563    {
1564  0 if (pClass != null) {
1565  0 appendDataTypeChangeLog(sb, pClass.getTable(), getKeyColumnName(pClass));
1566   
1567    // Update identifiers in ALL collection tables
1568  0 for (org.hibernate.mapping.Collection coll : getCollection(pClass)) {
1569  0 appendDataTypeChangeLog(sb, coll.getCollectionTable(), getKeyColumnName(coll));
1570    }
1571    }
1572    }
1573   
1574    /**
1575    * Check whether a table contains at least one foreign key that refers to a primary key in its referenced table.
1576    *
1577    * @param table the table to analyse
1578    * @return true if the table contains at least one FK that refers to a PK
1579    */
1580  0 private boolean checkFKtoPKinTable(Table table)
1581    {
1582  0 @SuppressWarnings("unchecked")
1583    Iterator<ForeignKey> fki = table.getForeignKeyIterator();
1584  0 while (fki.hasNext()) {
1585  0 ForeignKey fk = fki.next();
1586  0 if (fk.isReferenceToPrimaryKey()) {
1587  0 return true;
1588    }
1589    }
1590  0 return false;
1591    }
1592   
1593    /**
1594    * Retrieve the list of tables used to store the given persistent class that need to be processed for FK
1595    * constraints. The list includes the main table used to persist the class, if this table has FKs, as well as all
1596    * the collection tables used to store the properties of this persistent class.
1597    *
1598    * @param pClass the persistent class to analyze
1599    * @return a list of tables
1600    */
1601  0 private List<Table> getForeignKeyTables(PersistentClass pClass)
1602    {
1603  0 List<Table> list = new ArrayList<Table>();
1604   
1605  0 if (pClass != null) {
1606  0 Table table = pClass.getTable();
1607  0 if (checkFKtoPKinTable(table)) {
1608  0 list.add(table);
1609    }
1610   
1611  0 @SuppressWarnings("unchecked")
1612    Iterator<Property> it = pClass.getPropertyIterator();
1613  0 while (it.hasNext()) {
1614  0 Property property = it.next();
1615  0 if (property.getType().isCollectionType()) {
1616  0 org.hibernate.mapping.Collection coll = (org.hibernate.mapping.Collection) property.getValue();
1617  0 Table collTable = coll.getCollectionTable();
1618  0 if (checkFKtoPKinTable(collTable)) {
1619  0 list.add(collTable);
1620    }
1621    }
1622    }
1623    }
1624   
1625  0 return list;
1626    }
1627   
1628    /**
1629    * Append commands to drop all foreign keys of a given table.
1630    *
1631    * @param sb the string builder to append to
1632    * @param table the table to process
1633    */
1634  0 @SuppressWarnings("unchecked")
1635    private void appendDropForeignKeyChangeLog(StringBuilder sb, Table table)
1636    {
1637  0 Iterator<ForeignKey> fki = table.getForeignKeyIterator();
1638   
1639    // Preamble
1640  0 String tableName = table.getName();
1641  0 sb.append(" <changeSet id=\"R").append(this.getVersion().getVersion())
1642    .append('-').append(Util.getHash(String.format("dropForeignKeyConstraint-%s", tableName)))
1643    .append("\" author=\"xwiki\" runOnChange=\"true\" runAlways=\"true\" failOnError=\"false\">\n")
1644    .append(" <comment>Drop foreign keys on table [").append(tableName).append("]</comment>\n");
1645   
1646    // Concrete Property types should each have a foreign key referencing the BaseProperty
1647    // Other classes don't have any foreign keys at all; their tables were already filtered out by getForeignKeyTables()
1648  0 while (fki.hasNext()) {
1649  0 ForeignKey fk = fki.next();
1650    // Drop the old constraint
1651  0 if (fk.isReferenceToPrimaryKey()) {
1652  0 sb.append(" <dropForeignKeyConstraint baseTableName=\"")
1653    .append(tableName)
1654    .append("\" constraintName=\"").append(fk.getName()).append("\" />\n");
1655    }
1656    }
1657    // All done!
1658  0 sb.append(" </changeSet>\n");
1659  0 this.logCount++;
1660    }
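// Rough shape of the resulting change set, assuming a hypothetical property table "xwikiintegers" with a single
// FK that references the primary key of its base table:
//
//   <changeSet id="R40000-..." author="xwiki" runOnChange="true" runAlways="true" failOnError="false">
//     <comment>Drop foreign keys on table [xwikiintegers]</comment>
//     <dropForeignKeyConstraint baseTableName="xwikiintegers" constraintName="FK..."/>
//   </changeSet>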
1661   
1662    /**
1663    * Append change log to add foreign keys with CASCADEd updates.
1664    *
1665    * @param sb the string builder to append the add tasks to
1666    * @param table the table to process
1667    */
1668  0 @SuppressWarnings("unchecked")
1669    private void appendAddForeignKeyChangeLog(StringBuilder sb, Table table)
1670    {
1671  0 Iterator<ForeignKey> fki = table.getForeignKeyIterator();
1672   
1673    // Preamble
1674  0 String tableName = table.getName();
1675  0 sb.append(" <changeSet id=\"R").append(this.getVersion().getVersion())
1676    .append('-').append(Util.getHash(String.format("addForeignKeyConstraint-%s", tableName)))
1677    .append("\" author=\"xwiki\" runOnChange=\"true\" runAlways=\"true\">\n")
1678    .append(" <comment>Add foreign keys on table [").append(tableName)
1679    .append("] to use ON UPDATE CASCADE</comment>\n");
1680   
1681    // Concrete Property types should each have a foreign key referencing the BaseProperty
1682    // Other classes don't have any foreign keys at all; their tables were already filtered out by getForeignKeyTables()
1683  0 while (fki.hasNext()) {
1684  0 ForeignKey fk = fki.next();
1685   
1686  0 if (fk.isReferenceToPrimaryKey()) {
1687    // Recreate the constraint
1688  0 sb.append(" <addForeignKeyConstraint constraintName=\"").append(fk.getName()).append(
1689    "\" baseTableName=\"").append(tableName).append("\" baseColumnNames=\"");
1690   
1691    // Reuse the data from the old foreign key
1692    // Columns in the current table
1693  0 Iterator<Column> columns = fk.getColumnIterator();
1694  0 while (columns.hasNext()) {
1695  0 Column column = columns.next();
1696  0 sb.append(column.getName());
1697  0 if (columns.hasNext()) {
1698  0 sb.append(",");
1699    }
1700    }
1701  0 sb.append("\" referencedTableName=\"").append(fk.getReferencedTable().getName()).append(
1702    "\" referencedColumnNames=\"");
1703   
1704    // Columns in the referenced table
1705  0 columns = fk.getReferencedTable().getPrimaryKey().getColumnIterator();
1706  0 while (columns.hasNext()) {
1707  0 Column column = columns.next();
1708  0 sb.append(column.getName());
1709  0 if (columns.hasNext()) {
1710  0 sb.append(",");
1711    }
1712    }
1713   
1714    // The important part: cascaded updates
1715  0 if (this.isOracle) {
1716    // Oracle doesn't support cascaded updates, but it allows the constraint check to be deferred
1717    // to commit time (normal checking is done at the statement level).
1718  0 sb.append("\" initiallyDeferred=\"true\"/>\n");
1719    } else {
1720  0 sb.append("\" onUpdate=\"CASCADE\"/>\n");
1721    }
1722    }
1723    }
1724    // All done!
1725  0 sb.append(" </changeSet>\n");
1726  0 this.logCount++;
1727    }
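// Rough shape of the re-created constraint for the same hypothetical table: the FK is rebuilt from the mapping
// data with cascaded updates, except on Oracle where it is made initially deferred instead:
//
//   <changeSet id="R40000-..." author="xwiki" runOnChange="true" runAlways="true">
//     <comment>Add foreign keys on table [xwikiintegers] to use ON UPDATE CASCADE</comment>
//     <addForeignKeyConstraint constraintName="FK..." baseTableName="xwikiintegers" baseColumnNames="..."
//         referencedTableName="..." referencedColumnNames="..." onUpdate="CASCADE"/>
//   </changeSet>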
1728   
1729    /**
1730    * Detect the database product and initialize the isMySQL, isMySQLMyISAM, isOracle and isMSSQL flags.
1731    * isMySQLMyISAM is true if the xwikidoc table uses the MyISAM engine in MySQL, false otherwise or on any failure.
1732    * isOracle is true if we access an Oracle database.
1733    *
1734    * @param store the store to be checked
1735    */
1736  0 private void detectDatabaseProducts(XWikiHibernateBaseStore store)
1737    {
1738  0 DatabaseProduct product = store.getDatabaseProductName();
1739  0 if (product != DatabaseProduct.MYSQL) {
1740  0 this.isOracle = (product == DatabaseProduct.ORACLE);
1741  0 this.isMSSQL = (product == DatabaseProduct.MSSQL);
1742  0 return;
1743    }
1744   
1745  0 this.isMySQL = true;
1746   
1747  0 String createTable = store.failSafeExecuteRead(getXWikiContext(),
1748    new HibernateCallback<String>()
1749    {
1750  0 @Override
1751    public String doInHibernate(Session session) throws HibernateException
1752    {
1753  0 Query query = session.createSQLQuery("SHOW TABLE STATUS like 'xwikidoc'");
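// MySQL's SHOW TABLE STATUS returns the storage engine name in its second column, hence the [1] index on the
// result row below.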
1754  0 return (String) ((Object[]) query.uniqueResult())[1];
1755    }
1756    });
1757   
1758  0 this.isMySQLMyISAM = (createTable != null && createTable.equals("MyISAM"));
1759    }
1760   
1761  0 @Override
1762    public String getLiquibaseChangeLog() throws DataMigrationException
1763    {
1764  0 final XWikiHibernateBaseStore store = getStore();
1765  0 this.configuration = store.getConfiguration();
1766  0 final StringBuilder sb = new StringBuilder(12000);
1767  0 final List<PersistentClass> classes = new ArrayList<PersistentClass>();
1768   
1769  0 detectDatabaseProducts(store);
1770   
1771  0 if (this.logger.isDebugEnabled()) {
1772  0 if (this.isOracle) {
1773  0 this.logger
1774    .debug("Oracle database detected, proceeding to all updates manually with deferred constraints.");
1775    }
1776  0 if (this.isMySQL && !this.isMySQLMyISAM) {
1777  0 this.logger
1778    .debug("MySQL innoDB database detected, proceeding to simplified updates with cascaded updates.");
1779    }
1780  0 if (this.isMySQLMyISAM) {
1781  0 this.logger
1782    .debug("MySQL MyISAM database detected, proceeding to all updates manually without constraints.");
1783    }
1784  0 if (this.isMSSQL) {
1785  0 this.logger
1786    .debug("Microsoft SQL Server database detected, proceeding to simplified updates with cascaded u"
1787    + "pdates. During data type changes, Primary Key constraints and indexes are temporarily dropped.");
1788    }
1789    }
1790   
1791    // Build the list of classes to check for updates
1792  0 classes.add(getClassMapping(BaseObject.class.getName()));
1793  0 for (Class<?> klass : PROPERTY_CLASS) {
1794  0 classes.add(getClassMapping(klass.getName()));
1795    }
1796  0 for (Class<?> klass : STATS_CLASSES) {
1797  0 classes.add(getClassMapping(klass.getName()));
1798    }
1799   
1800    // Initialize the counter of Change Logs
1801  0 this.logCount = 0;
1802   
1803    // Manual updates of PKs and FKs will fail in most decent DBs if any FK constraints are active on these keys.
1804    // Since Hibernate does not activate cascaded updates, we need to rewrite the FK constraints. Moreover, some DBs
1805    // do not allow changing the type of fields involved in FK constraints, so we drop the FK constraints during
1806    // type updates.
1807    // Since MyISAM does not support FK constraints and does not prevent type changes, none of this processing is
1808    // needed for MySQL tables stored using the MyISAM engine, so we skip it in that case.
1809  0 if (!this.isMySQLMyISAM) {
1810  0 for (PersistentClass klass : classes) {
1811  0 this.fkTables.addAll(getForeignKeyTables(klass));
1812    }
1813    }
1814   
1815    // Drop all FK constraints
1816  0 for (Table table : this.fkTables) {
1817  0 appendDropForeignKeyChangeLog(sb, table);
1818    }
1819   
1820    // Process internal classes
1821  0 for (PersistentClass klass : classes) {
1822    // The same table is mapped for both StringListProperty and LargeStringProperty, so it is processed only once
1823  0 if (klass.getMappedClass() != StringListProperty.class) {
1824    // Update key types
1825  0 appendDataTypeChangeLogs(sb, klass);
1826    }
1827    }
1828   
1829    // Process dynamic and custom mapping
1830  0 final XWikiContext context = getXWikiContext();
1831   
1832  0 try {
1833  0 processCustomMappings((XWikiHibernateStore) store, new CustomMappingCallback()
1834    {
1835  0 @Override
1836    public void processCustomMapping(XWikiHibernateStore store, String name, String mapping,
1837    boolean hasDynamicMapping) throws XWikiException
1838    {
1839  0 if (INTERNAL.equals(mapping) || hasDynamicMapping) {
1840  0 PersistentClass klass = R40000XWIKI6990DataMigration.this.configuration.getClassMapping(name);
1841  0 if (!R40000XWIKI6990DataMigration.this.isMySQLMyISAM) {
1842  0 List<Table> tables = getForeignKeyTables(klass);
1843  0 for (Table table : tables) {
1844  0 if (!R40000XWIKI6990DataMigration.this.fkTables.contains(table)) {
1845    // Drop FK constraints for custom mapped class
1846  0 appendDropForeignKeyChangeLog(sb, table);
1847  0 R40000XWIKI6990DataMigration.this.fkTables.add(table);
1848    }
1849    }
1850    }
1851   
1852    // Update key types for custom mapped class
1853  0 appendDataTypeChangeLogs(sb, klass);
1854    }
1855    }
1856    }, context);
1857    } catch (XWikiException e) {
1858  0 throw new DataMigrationException("Unable to process custom mapped classes during schema update", e);
1859    }
1860   
1861    // Add FK constraints back, activating cascaded updates
1862  0 for (Table table : this.fkTables) {
1863  0 appendAddForeignKeyChangeLog(sb, table);
1864    }
1865   
1866    // Oracle doesn't support cascaded updates, so we still need to manually update each table
1867  0 if (this.isOracle) {
1868  0 this.fkTables.clear();
1869    }
1870   
1871  0 logProgress("%d schema updates required.", this.logCount);
1872  0 if (this.logger.isDebugEnabled()) {
1873  0 this.logger.debug("About to execute this Liquibase XML: {}", sb.toString());
1874    }
1875  0 return sb.toString();
1876    }
1877    }