package com.liferay.portal.tools.samplesqlbuilder;

import com.liferay.portal.dao.db.MySQLDB;
import com.liferay.portal.freemarker.FreeMarkerUtil;
import com.liferay.portal.kernel.dao.db.DB;
import com.liferay.portal.kernel.dao.db.DBFactoryUtil;
import com.liferay.portal.kernel.io.CharPipe;
import com.liferay.portal.kernel.io.OutputStreamWriter;
import com.liferay.portal.kernel.io.unsync.UnsyncBufferedReader;
import com.liferay.portal.kernel.io.unsync.UnsyncBufferedWriter;
import com.liferay.portal.kernel.io.unsync.UnsyncTeeWriter;
import com.liferay.portal.kernel.util.DateUtil_IW;
import com.liferay.portal.kernel.util.FileUtil;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.ListUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.StringUtil_IW;
import com.liferay.portal.model.Company;
import com.liferay.portal.model.Contact;
import com.liferay.portal.model.Group;
import com.liferay.portal.model.Layout;
import com.liferay.portal.model.Role;
import com.liferay.portal.model.User;
import com.liferay.portal.tools.ArgumentsUtil;
import com.liferay.portal.util.InitUtil;
import com.liferay.portlet.blogs.model.BlogsEntry;
import com.liferay.portlet.documentlibrary.model.DLFileEntry;
import com.liferay.portlet.documentlibrary.model.DLFolder;
import com.liferay.portlet.dynamicdatalists.model.DDLRecord;
import com.liferay.portlet.dynamicdatalists.model.DDLRecordSet;
import com.liferay.portlet.dynamicdatamapping.model.DDMStructure;
import com.liferay.portlet.messageboards.model.MBCategory;
import com.liferay.portlet.messageboards.model.MBMessage;
import com.liferay.portlet.wiki.model.WikiNode;
import com.liferay.portlet.wiki.model.WikiPage;
import com.liferay.util.SimpleCounter;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;

import java.nio.channels.FileChannel;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

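/**
 * Generates sample SQL data for benchmarking. FreeMarker templates under the
 * dependencies directory are rendered into a character pipe, compressed into
 * multi-row insert statements per table, and written either as one merged SQL
 * file or as one file per table.
 */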
public class SampleSQLBuilder {

    public static void main(String[] args) {
        Map<String, String> arguments = ArgumentsUtil.parseArguments(args);

        InitUtil.initWithSpring();

        String baseDir = arguments.get("sample.sql.base.dir");
        String dbType = arguments.get("sample.sql.db.type");
        int maxBlogsEntryCommentCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.blogs.entry.comment.count"));
        int maxBlogsEntryCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.blogs.entry.count"));
        int maxDDLRecordCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.ddl.record.count"));
        int maxDDLRecordSetCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.ddl.record.set.count"));
        int maxDLFileEntryCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.dl.file.entry.count"));
        int maxDLFileEntrySize = GetterUtil.getInteger(
            arguments.get("sample.sql.max.dl.file.entry.size"));
        int maxDLFolderCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.dl.folder.count"));
        int maxDLFolderDepth = GetterUtil.getInteger(
            arguments.get("sample.sql.max.dl.folder.depth"));
        int maxGroupCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.group.count"));
        int maxJournalArticleCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.journal.article.count"));
        int maxJournalArticleSize = GetterUtil.getInteger(
            arguments.get("sample.sql.max.journal.article.size"));
        int maxMBCategoryCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.mb.category.count"));
        int maxMBMessageCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.mb.message.count"));
        int maxMBThreadCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.mb.thread.count"));
        int maxUserCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.user.count"));
        int maxUserToGroupCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.user.to.group.count"));
        int maxWikiNodeCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.wiki.node.count"));
        int maxWikiPageCommentCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.wiki.page.comment.count"));
        int maxWikiPageCount = GetterUtil.getInteger(
            arguments.get("sample.sql.max.wiki.page.count"));
        int optimizeBufferSize = GetterUtil.getInteger(
            arguments.get("sample.sql.optimize.buffer.size"));
        String outputDir = arguments.get("sample.sql.output.dir");
        boolean outputMerge = GetterUtil.getBoolean(
            arguments.get("sample.sql.output.merge"));

        new SampleSQLBuilder(
            arguments, baseDir, dbType, maxBlogsEntryCommentCount,
            maxBlogsEntryCount, maxDDLRecordCount, maxDDLRecordSetCount,
            maxDLFileEntryCount, maxDLFileEntrySize, maxDLFolderCount,
            maxDLFolderDepth, maxGroupCount, maxJournalArticleCount,
            maxJournalArticleSize, maxMBCategoryCount, maxMBMessageCount,
            maxMBThreadCount, maxUserCount, maxUserToGroupCount,
            maxWikiNodeCount, maxWikiPageCommentCount, maxWikiPageCount,
            optimizeBufferSize, outputDir, outputMerge);
    }

    public SampleSQLBuilder(
        Map<String, String> arguments, String baseDir, String dbType,
        int maxBlogsEntryCommentCount, int maxBlogsEntryCount,
        int maxDDLRecordCount, int maxDDLRecordSetCount,
        int maxDLFileEntryCount, int maxDLFileEntrySize, int maxDLFolderCount,
        int maxDLFolderDepth, int maxGroupCount, int maxJournalArticleCount,
        int maxJournalArticleSize, int maxMBCategoryCount,
        int maxMBMessageCount, int maxMBThreadCount, int maxUserCount,
        int maxUserToGroupCount, int maxWikiNodeCount,
        int maxWikiPageCommentCount, int maxWikiPageCount,
        int optimizeBufferSize, String outputDir, boolean outputMerge) {

        try {
            _dbType = dbType;
            _maxBlogsEntryCommentCount = maxBlogsEntryCommentCount;
            _maxBlogsEntryCount = maxBlogsEntryCount;
            _maxDDLRecordCount = maxDDLRecordCount;
            _maxDDLRecordSetCount = maxDDLRecordSetCount;
            _maxDLFileEntryCount = maxDLFileEntryCount;
            _maxDLFileEntrySize = maxDLFileEntrySize;
            _maxDLFolderCount = maxDLFolderCount;
            _maxDLFolderDepth = maxDLFolderDepth;
            _maxGroupCount = maxGroupCount;
            _maxJournalArticleCount = maxJournalArticleCount;
            _maxMBCategoryCount = maxMBCategoryCount;
            _maxMBMessageCount = maxMBMessageCount;
            _maxMBThreadCount = maxMBThreadCount;
            _maxUserCount = maxUserCount;
            _maxUserToGroupCount = maxUserToGroupCount;
            _maxWikiNodeCount = maxWikiNodeCount;
            _maxWikiPageCommentCount = maxWikiPageCommentCount;
            _maxWikiPageCount = maxWikiPageCount;
            _optimizeBufferSize = optimizeBufferSize;
            _outputDir = outputDir;
            _outputMerge = outputMerge;

            int totalMBThreadCount = maxMBCategoryCount * maxMBThreadCount;
            int totalMBMessageCount = totalMBThreadCount * maxMBMessageCount;

            int counterOffset =
                _maxGroupCount +
                (_maxGroupCount *
                    (maxMBCategoryCount + totalMBThreadCount +
                        totalMBMessageCount)) + 1;

            _counter = new SimpleCounter(counterOffset);
            _dlDateCounter = new SimpleCounter();
            _permissionCounter = new SimpleCounter();
            _resourceCounter = new SimpleCounter();
            _resourcePermissionCounter = new SimpleCounter();
            _socialActivityCounter = new SimpleCounter();

            _userScreenNameIncrementer = new SimpleCounter();

            _dataFactory = new DataFactory(
                baseDir, _maxGroupCount, maxJournalArticleSize,
                _maxUserToGroupCount, _counter, _dlDateCounter,
                _permissionCounter, _resourceCounter,
                _resourcePermissionCounter, _socialActivityCounter);

            _db = DBFactoryUtil.getDB(_dbType);

            if (_db instanceof MySQLDB) {
                _db = new SampleMySQLDB();
            }
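
            // Remove any SQL output left over from a previous run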
            FileUtil.delete(_outputDir + "/sample-" + _dbType + ".sql");
            FileUtil.deltree(_outputDir + "/output");
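
            // Stage the per-table SQL files in a temporary directory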
            _tempDir = new File(_outputDir, "temp");

            _tempDir.mkdirs();

            final CharPipe charPipe = new CharPipe(_PIPE_BUFFER_SIZE);

            generateSQL(charPipe);

            try {
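
                // Read the generated statements from the pipe and batch them
                // into multi-row inserts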
                compressSQL(charPipe.getReader());
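
                // Combine the per-table files into the final output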
                mergeSQL();
            }
            finally {
                FileUtil.deltree(_tempDir);
            }
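
            // Record the sample.sql.* argument values used for this run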
            StringBundler sb = new StringBundler();

            List<String> keys = ListUtil.fromMapKeys(arguments);

            Collections.sort(keys);

            for (String key : keys) {
                if (!key.startsWith("sample.sql")) {
                    continue;
                }

                String value = arguments.get(key);

                sb.append(key);
                sb.append(StringPool.EQUAL);
                sb.append(value);
                sb.append(StringPool.NEW_LINE);
            }

            FileUtil.write(
                new File(_outputDir, "benchmarks-actual.properties"),
                sb.toString());
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void insertBlogsEntry(BlogsEntry blogsEntry) throws Exception {
        Map<String, Object> context = getContext();

        put(context, "blogsEntry", blogsEntry);

        processTemplate(_tplBlogsEntry, context);
    }

    public void insertDDLRecord(DDLRecord ddlRecord, DDLRecordSet ddlRecordSet)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "ddlRecord", ddlRecord);
        put(context, "ddlRecordSet", ddlRecordSet);

        processTemplate(_tplDDLRecord, context);
    }

    public void insertDDLRecordSet(DDMStructure ddmStructure) throws Exception {
        Map<String, Object> context = getContext();

        put(context, "ddmStructure", ddmStructure);

        processTemplate(_tplDDLRecordSet, context);
    }

    public void insertDLFileEntry(
            DLFileEntry dlFileEntry, DDMStructure ddmStructure)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "ddmStructure", ddmStructure);
        put(context, "dlFileEntry", dlFileEntry);

        processTemplate(_tplDLFileEntry, context);
    }

    public void insertDLFolder(DLFolder dlFolder, DDMStructure ddmStructure)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "ddmStructure", ddmStructure);
        put(context, "dlFolder", dlFolder);

        processTemplate(_tplDLFolder, context);
    }

    public void insertDLFolders(
            long parentDLFolderId, int dlFolderDepth,
            DDMStructure ddmStructure)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "ddmStructure", ddmStructure);
        put(context, "dlFolderDepth", dlFolderDepth);
        put(context, "parentDLFolderId", parentDLFolderId);

        processTemplate(_tplDLFolders, context);
    }

    public void insertGroup(
            Group group, List<Layout> privateLayouts,
            List<Layout> publicLayouts)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "group", group);
        put(context, "privateLayouts", privateLayouts);
        put(context, "publicLayouts", publicLayouts);

        processTemplate(_tplGroup, context);
    }

    public void insertJournalArticle(
            long groupId, List<Layout> journalArticleLayouts)
        throws Exception {

        if ((journalArticleLayouts == null) ||
            journalArticleLayouts.isEmpty()) {

            return;
        }

        Map<String, Object> context = getContext();

        put(context, "groupId", groupId);
        put(context, "journalArticleLayouts", journalArticleLayouts);

        processTemplate(_tplJournalArticle, context);
    }

    public void insertMBCategory(MBCategory mbCategory) throws Exception {
        Map<String, Object> context = getContext();

        put(context, "mbCategory", mbCategory);

        processTemplate(_tplMBCategory, context);
    }

    public void insertMBMessage(MBMessage mbMessage) throws Exception {
        Map<String, Object> context = getContext();

        put(context, "mbMessage", mbMessage);

        processTemplate(_tplMBMessage, context);
    }

    public void insertResourcePermission(String name, String primKey)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "resourceName", name);
        put(context, "resourcePrimkey", primKey);

        processTemplate(_tplResourcePermission, context);
    }

    public void insertUser(
            Contact contact, Group group, List<Long> groupIds,
            List<Long> organizationIds, List<Layout> privateLayouts,
            List<Layout> publicLayouts, List<Role> roleIds, User user)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "contact", contact);
        put(context, "group", group);
        put(context, "groupIds", groupIds);
        put(context, "organizationIds", organizationIds);
        put(context, "privateLayouts", privateLayouts);
        put(context, "publicLayouts", publicLayouts);
        put(context, "roleIds", roleIds);
        put(context, "user", user);

        processTemplate(_tplUser, context);
    }

    public void insertWikiPage(WikiNode wikiNode, WikiPage wikiPage)
        throws Exception {

        Map<String, Object> context = getContext();

        put(context, "wikiNode", wikiNode);
        put(context, "wikiPage", wikiPage);

        processTemplate(_tplWikiPage, context);
    }
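
    /**
     * Buffers the values clause of an insert statement per table and flushes
     * the buffer as a single multi-row insert statement once it grows past
     * the configured optimize buffer size.
     */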
    protected void compressInsertSQL(String insertSQL) throws IOException {
        String tableName = insertSQL.substring(0, insertSQL.indexOf(' '));

        int pos = insertSQL.indexOf(" values ") + 8;

        String values = insertSQL.substring(pos, insertSQL.length() - 1);

        StringBundler sb = _insertSQLs.get(tableName);

        if ((sb == null) || (sb.index() == 0)) {
            sb = new StringBundler();

            _insertSQLs.put(tableName, sb);

            sb.append("insert into ");
            sb.append(insertSQL.substring(0, pos));
            sb.append("\n");
        }
        else {
            sb.append(",\n");
        }

        sb.append(values);

        if (sb.index() >= _optimizeBufferSize) {
            sb.append(";\n");

            String sql = _db.buildSQL(sb.toString());

            sb.setIndex(0);

            writeToInsertSQLFile(tableName, sql);
        }
    }
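
    /**
     * Reads the generated SQL line by line, batching insert statements via
     * compressInsertSQL and collecting every other statement for the merge
     * step.
     */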
    protected void compressSQL(Reader reader) throws IOException {
        UnsyncBufferedReader unsyncBufferedReader = new UnsyncBufferedReader(
            reader);

        String s = null;

        while ((s = unsyncBufferedReader.readLine()) != null) {
            s = s.trim();

            if (s.length() > 0) {
                if (s.startsWith("insert into ")) {
                    compressInsertSQL(s.substring(12));
                }
                else {
                    _otherSQLs.add(s);
                }
            }
        }

        unsyncBufferedReader.close();
    }

    protected Writer createFileWriter(File file) throws IOException {
        FileOutputStream fileOutputStream = new FileOutputStream(file);

        Writer writer = new OutputStreamWriter(fileOutputStream);

        return createUnsyncBufferedWriter(writer);
    }

    protected Writer createFileWriter(String fileName) throws IOException {
        File file = new File(fileName);

        return createFileWriter(file);
    }

    protected Writer createUnsyncBufferedWriter(Writer writer) {
        return new UnsyncBufferedWriter(writer, _WRITER_BUFFER_SIZE) {

            @Override
            public void flush() {
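
                // Intentionally a no-op: prevents callers from forcing
                // premature flushes of the buffered output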
            }

        };
    }
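
    /**
     * Renders the sample data templates on a separate thread, teeing the
     * generated SQL into the character pipe and into sample.sql in the output
     * directory.
     */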
    protected void generateSQL(final CharPipe charPipe) {
        final Writer writer = createUnsyncBufferedWriter(charPipe.getWriter());

        Thread thread = new Thread() {

            @Override
            public void run() {
                try {
                    _writerSampleSQL = new UnsyncTeeWriter(
                        writer, createFileWriter(_outputDir + "/sample.sql"));

                    createSample();

                    _writerSampleSQL.close();

                    charPipe.close();
                }
                catch (Exception e) {
                    e.printStackTrace();
                }
            }

            protected void createSample() throws Exception {
                _writerBlogsCSV = getWriter("blogs.csv");
                _writerCompanyCSV = getWriter("company.csv");
                _writerDocumentLibraryCSV = getWriter("document_library.csv");
                _writerDynamicDataListsCSV = getWriter(
                    "dynamic_data_lists.csv");
                _writerLayoutCSV = getWriter("layout.csv");
                _writerMessageBoardsCSV = getWriter("message_boards.csv");
                _writerRepositoryCSV = getWriter("repository.csv");
                _writerUserCSV = getWriter("user.csv");
                _writerWikiCSV = getWriter("wiki.csv");

                Map<String, Object> context = getContext();

                processTemplate(_tplSample, context);

                _writerBlogsCSV.close();
                _writerCompanyCSV.close();
                _writerDocumentLibraryCSV.close();
                _writerDynamicDataListsCSV.close();
                _writerLayoutCSV.close();
                _writerMessageBoardsCSV.close();
                _writerRepositoryCSV.close();
                _writerUserCSV.close();
                _writerWikiCSV.close();
            }

            protected Writer getWriter(String fileName) throws Exception {
                return createFileWriter(new File(_outputDir + "/" + fileName));
            }

        };

        thread.start();
    }
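
    /**
     * Builds the template context shared by all FreeMarker templates,
     * exposing the configured limits, counters, and CSV writers.
     */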
    protected Map<String, Object> getContext() {
        Map<String, Object> context = new HashMap<String, Object>();

        Company company = _dataFactory.getCompany();
        User defaultUser = _dataFactory.getDefaultUser();

        put(context, "companyId", company.getCompanyId());
        put(context, "counter", _counter);
        put(context, "dataFactory", _dataFactory);
        put(context, "dateUtil", DateUtil_IW.getInstance());
        put(context, "defaultUserId", defaultUser.getUserId());
        put(context, "maxDLFileEntrySize", _maxDLFileEntrySize);
        put(context, "maxBlogsEntryCommentCount", _maxBlogsEntryCommentCount);
        put(context, "maxBlogsEntryCount", _maxBlogsEntryCount);
        put(context, "maxDDLRecordCount", _maxDDLRecordCount);
        put(context, "maxDDLRecordSetCount", _maxDDLRecordSetCount);
        put(context, "maxDLFileEntryCount", _maxDLFileEntryCount);
        put(context, "maxDLFolderCount", _maxDLFolderCount);
        put(context, "maxDLFolderDepth", _maxDLFolderDepth);
        put(context, "maxGroupCount", _maxGroupCount);
        put(context, "maxJournalArticleCount", _maxJournalArticleCount);
        put(context, "maxMBCategoryCount", _maxMBCategoryCount);
        put(context, "maxMBMessageCount", _maxMBMessageCount);
        put(context, "maxMBThreadCount", _maxMBThreadCount);
        put(context, "maxUserCount", _maxUserCount);
        put(context, "maxUserToGroupCount", _maxUserToGroupCount);
        put(context, "maxWikiNodeCount", _maxWikiNodeCount);
        put(context, "maxWikiPageCommentCount", _maxWikiPageCommentCount);
        put(context, "maxWikiPageCount", _maxWikiPageCount);
        put(context, "portalUUIDUtil", SequentialUUID.getSequentialUUID());
        put(context, "sampleSQLBuilder", this);
        put(context, "stringUtil", StringUtil_IW.getInstance());
        put(context, "userScreenNameIncrementer", _userScreenNameIncrementer);
        put(context, "writerBlogsCSV", _writerBlogsCSV);
        put(context, "writerCompanyCSV", _writerCompanyCSV);
        put(context, "writerDocumentLibraryCSV", _writerDocumentLibraryCSV);
        put(context, "writerDynamicDataListsCSV", _writerDynamicDataListsCSV);
        put(context, "writerLayoutCSV", _writerLayoutCSV);
        put(context, "writerMessageBoardsCSV", _writerMessageBoardsCSV);
        put(context, "writerRepositoryCSV", _writerRepositoryCSV);
        put(context, "writerUserCSV", _writerUserCSV);
        put(context, "writerWikiCSV", _writerWikiCSV);

        return context;
    }

    protected File getInsertSQLFile(String tableName) {
        return new File(_tempDir, tableName + ".sql");
    }
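
    /**
     * Flushes the remaining per-table insert buffers and either merges
     * everything into a single SQL file or leaves one file per table in the
     * output directory.
     */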
    protected void mergeSQL() throws IOException {
        File outputFile = new File(_outputDir + "/sample-" + _dbType + ".sql");

        FileOutputStream fileOutputStream = null;
        FileChannel fileChannel = null;

        if (_outputMerge) {
            fileOutputStream = new FileOutputStream(outputFile);
            fileChannel = fileOutputStream.getChannel();
        }

        Set<Map.Entry<String, StringBundler>> insertSQLs =
            _insertSQLs.entrySet();

        for (Map.Entry<String, StringBundler> entry : insertSQLs) {
            String tableName = entry.getKey();

            String sql = _db.buildSQL(entry.getValue().toString());

            writeToInsertSQLFile(tableName, sql);

            Writer insertSQLWriter = _insertSQLWriters.remove(tableName);

            insertSQLWriter.write(";\n");

            insertSQLWriter.close();

            if (_outputMerge) {
                File insertSQLFile = getInsertSQLFile(tableName);

                FileInputStream insertSQLFileInputStream = new FileInputStream(
                    insertSQLFile);

                FileChannel insertSQLFileChannel =
                    insertSQLFileInputStream.getChannel();

                insertSQLFileChannel.transferTo(
                    0, insertSQLFileChannel.size(), fileChannel);

                insertSQLFileChannel.close();

                insertSQLFile.delete();
            }
        }

        Writer writer = null;

        if (_outputMerge) {
            writer = new OutputStreamWriter(fileOutputStream);
        }
        else {
            writer = new FileWriter(getInsertSQLFile("others"));
        }

        for (String sql : _otherSQLs) {
            sql = _db.buildSQL(sql);

            writer.write(sql);
            writer.write(StringPool.NEW_LINE);
        }

        writer.close();

        File outputFolder = new File(_outputDir, "output");

        if (!_outputMerge && !_tempDir.renameTo(outputFolder)) {
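
            // File.renameTo can fail, for example across file systems, so
            // fall back to copying the directory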
            FileUtil.copyDirectory(_tempDir, outputFolder);
        }
    }

    protected void processTemplate(String name, Map<String, Object> context)
        throws Exception {

        FreeMarkerUtil.process(name, context, _writerSampleSQL);
    }

    protected void put(Map<String, Object> context, String key, Object value) {
        context.put(key, value);
    }

    protected void writeToInsertSQLFile(String tableName, String sql)
        throws IOException {

        Writer writer = _insertSQLWriters.get(tableName);

        if (writer == null) {
            File file = getInsertSQLFile(tableName);

            writer = createFileWriter(file);

            _insertSQLWriters.put(tableName, writer);
        }

        writer.write(sql);
    }

    private static final int _PIPE_BUFFER_SIZE = 16 * 1024 * 1024;

    private static final String _TPL_ROOT =
        "com/liferay/portal/tools/samplesqlbuilder/dependencies/";

    private static final int _WRITER_BUFFER_SIZE = 16 * 1024;

    private SimpleCounter _counter;
    private DataFactory _dataFactory;
    private DB _db;
    private String _dbType;
    private SimpleCounter _dlDateCounter;
    private Map<String, StringBundler> _insertSQLs =
        new ConcurrentHashMap<String, StringBundler>();
    private Map<String, Writer> _insertSQLWriters =
        new ConcurrentHashMap<String, Writer>();
    private int _maxBlogsEntryCommentCount;
    private int _maxBlogsEntryCount;
    private int _maxDDLRecordCount;
    private int _maxDDLRecordSetCount;
    private int _maxDLFileEntryCount;
    private int _maxDLFileEntrySize;
    private int _maxDLFolderCount;
    private int _maxDLFolderDepth;
    private int _maxGroupCount;
    private int _maxJournalArticleCount;
    private int _maxMBCategoryCount;
    private int _maxMBMessageCount;
    private int _maxMBThreadCount;
    private int _maxUserCount;
    private int _maxUserToGroupCount;
    private int _maxWikiNodeCount;
    private int _maxWikiPageCommentCount;
    private int _maxWikiPageCount;
    private int _optimizeBufferSize;
    private List<String> _otherSQLs = new ArrayList<String>();
    private String _outputDir;
    private boolean _outputMerge;
    private SimpleCounter _permissionCounter;
    private SimpleCounter _resourceCounter;
    private SimpleCounter _resourcePermissionCounter;
    private SimpleCounter _socialActivityCounter;
    private File _tempDir;
    private String _tplBlogsEntry = _TPL_ROOT + "blogs_entry.ftl";
    private String _tplDDLRecord = _TPL_ROOT + "ddl_record.ftl";
    private String _tplDDLRecordSet = _TPL_ROOT + "ddl_record_set.ftl";
    private String _tplDLFileEntry = _TPL_ROOT + "dl_file_entry.ftl";
    private String _tplDLFolder = _TPL_ROOT + "dl_folder.ftl";
    private String _tplDLFolders = _TPL_ROOT + "dl_folders.ftl";
    private String _tplGroup = _TPL_ROOT + "group.ftl";
    private String _tplJournalArticle = _TPL_ROOT + "journal_article.ftl";
    private String _tplMBCategory = _TPL_ROOT + "mb_category.ftl";
    private String _tplMBMessage = _TPL_ROOT + "mb_message.ftl";
    private String _tplResourcePermission =
        _TPL_ROOT + "resource_permission.ftl";
    private String _tplSample = _TPL_ROOT + "sample.ftl";
    private String _tplUser = _TPL_ROOT + "user.ftl";
    private String _tplWikiPage = _TPL_ROOT + "wiki_page.ftl";
    private SimpleCounter _userScreenNameIncrementer;
    private Writer _writerBlogsCSV;
    private Writer _writerCompanyCSV;
    private Writer _writerDocumentLibraryCSV;
    private Writer _writerDynamicDataListsCSV;
    private Writer _writerLayoutCSV;
    private Writer _writerMessageBoardsCSV;
    private Writer _writerRepositoryCSV;
    private Writer _writerSampleSQL;
    private Writer _writerUserCSV;
    private Writer _writerWikiCSV;

}