package com.liferay.portal.tools.samplesqlbuilder;

import com.liferay.portal.dao.db.MySQLDB;
import com.liferay.portal.freemarker.FreeMarkerUtil;
import com.liferay.portal.kernel.dao.db.DB;
import com.liferay.portal.kernel.dao.db.DBFactoryUtil;
import com.liferay.portal.kernel.io.CharPipe;
import com.liferay.portal.kernel.io.OutputStreamWriter;
import com.liferay.portal.kernel.io.unsync.UnsyncBufferedReader;
import com.liferay.portal.kernel.io.unsync.UnsyncBufferedWriter;
import com.liferay.portal.kernel.io.unsync.UnsyncTeeWriter;
import com.liferay.portal.kernel.util.DateUtil_IW;
import com.liferay.portal.kernel.util.FileUtil;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.ListUtil;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.StringUtil_IW;
import com.liferay.portal.model.Contact;
import com.liferay.portal.model.Group;
import com.liferay.portal.model.Layout;
import com.liferay.portal.model.Role;
import com.liferay.portal.model.User;
import com.liferay.portal.tools.ArgumentsUtil;
import com.liferay.portal.util.InitUtil;
import com.liferay.portlet.blogs.model.BlogsEntry;
import com.liferay.portlet.documentlibrary.model.DLFileEntry;
import com.liferay.portlet.documentlibrary.model.DLFolder;
import com.liferay.portlet.dynamicdatalists.model.DDLRecord;
import com.liferay.portlet.dynamicdatalists.model.DDLRecordSet;
import com.liferay.portlet.dynamicdatamapping.model.DDMStructure;
import com.liferay.portlet.messageboards.model.MBCategory;
import com.liferay.portlet.messageboards.model.MBMessage;
import com.liferay.portlet.wiki.model.WikiNode;
import com.liferay.portlet.wiki.model.WikiPage;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;

import java.nio.channels.FileChannel;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;

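/**
 * Builds the sample SQL script and CSV files used for benchmarking. Everything
 * is driven by the "sample.sql.*" properties passed in as arguments.
 */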
public class SampleSQLBuilder {

	public static void main(String[] args) {
		Map<String, String> arguments = ArgumentsUtil.parseArguments(args);

		InitUtil.initWithSpring();

		try {
			new SampleSQLBuilder(arguments);
		}
		catch (Exception e) {
			e.printStackTrace();
		}
	}

	public SampleSQLBuilder(Map<String, String> arguments) throws Exception {
		String baseDir = arguments.get("sample.sql.base.dir");

		_dbType = arguments.get("sample.sql.db.type");
		_maxBlogsEntryCommentCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.blogs.entry.comment.count"));
		_maxBlogsEntryCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.blogs.entry.count"));
		_maxDDLRecordCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.ddl.record.count"));
		_maxDDLRecordSetCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.ddl.record.set.count"));
		_maxDLFileEntryCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.dl.file.entry.count"));
		_maxDLFileEntrySize = GetterUtil.getInteger(
			arguments.get("sample.sql.max.dl.file.entry.size"));
		_maxDLFolderCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.dl.folder.count"));
		_maxDLFolderDepth = GetterUtil.getInteger(
			arguments.get("sample.sql.max.dl.folder.depth"));
		_maxGroupCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.group.count"));
		_maxJournalArticleCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.journal.article.count"));
		_maxJournalArticleSize = GetterUtil.getInteger(
			arguments.get("sample.sql.max.journal.article.size"));
		_maxMBCategoryCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.mb.category.count"));
		_maxMBMessageCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.mb.message.count"));
		_maxMBThreadCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.mb.thread.count"));
		_maxUserCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.user.count"));
		_maxUserToGroupCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.user.to.group.count"));
		_maxWikiNodeCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.wiki.node.count"));
		_maxWikiPageCommentCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.wiki.page.comment.count"));
		_maxWikiPageCount = GetterUtil.getInteger(
			arguments.get("sample.sql.max.wiki.page.count"));
		_optimizeBufferSize = GetterUtil.getInteger(
			arguments.get("sample.sql.optimize.buffer.size"));
		_outputDir = arguments.get("sample.sql.output.dir");
		_outputMerge = GetterUtil.getBoolean(
			arguments.get("sample.sql.output.merge"));

		_dataFactory = new DataFactory(
			baseDir, _maxGroupCount, _maxJournalArticleSize,
			_maxMBCategoryCount, _maxMBThreadCount, _maxMBMessageCount,
			_maxUserToGroupCount);

		_db = DBFactoryUtil.getDB(_dbType);

		if (_db instanceof MySQLDB) {
			_db = new SampleMySQLDB();
		}

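		// Remove any output left over from a previous run
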
		FileUtil.delete(_outputDir + "/sample-" + _dbType + ".sql");
		FileUtil.deltree(_outputDir + "/output");

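		// Generate the sample SQL in a separate thread and pipe it to the
		// compression step below
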
		_tempDir = new File(_outputDir, "temp");

		_tempDir.mkdirs();

		final CharPipe charPipe = new CharPipe(_PIPE_BUFFER_SIZE);

		generateSQL(charPipe);

		try {

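			// Collapse the piped insert statements into multi-row inserts
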
			compressSQL(charPipe.getReader());

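			// Merge the per table SQL files into the final output
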
			mergeSQL();
		}
		finally {
			FileUtil.deltree(_tempDir);
		}

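		// Record the sample.sql.* properties that were actually used
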
		StringBundler sb = new StringBundler();

		List<String> keys = ListUtil.fromMapKeys(arguments);

		Collections.sort(keys);

		for (String key : keys) {
			if (!key.startsWith("sample.sql")) {
				continue;
			}

			String value = arguments.get(key);

			sb.append(key);
			sb.append(StringPool.EQUAL);
			sb.append(value);
			sb.append(StringPool.NEW_LINE);
		}

		FileUtil.write(
			new File(_outputDir, "benchmarks-actual.properties"),
			sb.toString());
	}

	public void insertBlogsEntry(BlogsEntry blogsEntry) throws Exception {
		Map<String, Object> context = getContext();

		put(context, "blogsEntry", blogsEntry);

		processTemplate(_tplBlogsEntry, context);
	}

	public void insertDDLRecord(
			DDLRecord ddlRecord, DDLRecordSet ddlRecordSet, int ddlRecordCount)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "ddlRecord", ddlRecord);
		put(context, "ddlRecordCount", ddlRecordCount);
		put(context, "ddlRecordSet", ddlRecordSet);

		processTemplate(_tplDDLRecord, context);
	}

	public void insertDDLRecordSet(DDMStructure ddmStructure) throws Exception {
		Map<String, Object> context = getContext();

		put(context, "ddmStructure", ddmStructure);

		processTemplate(_tplDDLRecordSet, context);
	}

	public void insertDLFileEntry(
			DLFileEntry dlFileEntry, DDMStructure ddmStructure)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "ddmStructure", ddmStructure);
		put(context, "dlFileEntry", dlFileEntry);

		processTemplate(_tplDLFileEntry, context);
	}

	public void insertDLFolder(DLFolder dlFolder, DDMStructure ddmStructure)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "ddmStructure", ddmStructure);
		put(context, "dlFolder", dlFolder);

		processTemplate(_tplDLFolder, context);
	}

	public void insertDLFolders(
			long parentDLFolderId, int dlFolderDepth, DDMStructure ddmStructure)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "ddmStructure", ddmStructure);
		put(context, "dlFolderDepth", dlFolderDepth);
		put(context, "parentDLFolderId", parentDLFolderId);

		processTemplate(_tplDLFolders, context);
	}

	public void insertGroup(
			Group group, List<Layout> privateLayouts,
			List<Layout> publicLayouts)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "group", group);
		put(context, "privateLayouts", privateLayouts);
		put(context, "publicLayouts", publicLayouts);

		processTemplate(_tplGroup, context);
	}

	public void insertMBCategory(MBCategory mbCategory) throws Exception {
		Map<String, Object> context = getContext();

		put(context, "mbCategory", mbCategory);

		processTemplate(_tplMBCategory, context);
	}

	public void insertMBMessage(MBMessage mbMessage) throws Exception {
		Map<String, Object> context = getContext();

		put(context, "mbMessage", mbMessage);

		processTemplate(_tplMBMessage, context);
	}

	public void insertResourcePermission(String name, String primKey)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "resourceName", name);
		put(context, "resourcePrimkey", primKey);

		processTemplate(_tplResourcePermission, context);
	}

	public void insertUser(
			Contact contact, List<Long> groupIds, List<Long> organizationIds,
			List<Role> roleIds, User user)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "contact", contact);
		put(context, "groupIds", groupIds);
		put(context, "organizationIds", organizationIds);
		put(context, "roleIds", roleIds);
		put(context, "user", user);

		processTemplate(_tplUser, context);
	}

	public void insertWikiPage(WikiNode wikiNode, WikiPage wikiPage)
		throws Exception {

		Map<String, Object> context = getContext();

		put(context, "wikiNode", wikiNode);
		put(context, "wikiPage", wikiPage);

		processTemplate(_tplWikiPage, context);
	}

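	/**
	 * Appends one row of values to the multi-row insert being built for the
	 * given table and, once the buffer grows past the configured
	 * sample.sql.optimize.buffer.size, flushes the batched statement to that
	 * table's SQL file.
	 */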
	protected void compressInsertSQL(String insertSQL) throws IOException {
		String tableName = insertSQL.substring(0, insertSQL.indexOf(' '));

		int pos = insertSQL.indexOf(" values ") + 8;

		String values = insertSQL.substring(pos, insertSQL.length() - 1);

		StringBundler sb = _insertSQLs.get(tableName);

		if ((sb == null) || (sb.index() == 0)) {
			sb = new StringBundler();

			_insertSQLs.put(tableName, sb);

			sb.append("insert into ");
			sb.append(insertSQL.substring(0, pos));
			sb.append("\n");
		}
		else {
			sb.append(",\n");
		}

		sb.append(values);

		if (sb.index() >= _optimizeBufferSize) {
			sb.append(";\n");

			String sql = _db.buildSQL(sb.toString());

			sb.setIndex(0);

			writeToInsertSQLFile(tableName, sql);
		}
	}

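	/**
	 * Reads the generated SQL line by line, routing insert statements through
	 * {@link #compressInsertSQL(String)} and collecting every other statement
	 * for {@link #mergeSQL()} to write out later.
	 */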
	protected void compressSQL(Reader reader) throws IOException {
		UnsyncBufferedReader unsyncBufferedReader = new UnsyncBufferedReader(
			reader);

		String s = null;

		while ((s = unsyncBufferedReader.readLine()) != null) {
			s = s.trim();

			if (s.length() > 0) {
				if (s.startsWith("insert into ")) {
					compressInsertSQL(s.substring(12));
				}
				else {
					_otherSQLs.add(s);
				}
			}
		}

		unsyncBufferedReader.close();
	}

	protected Writer createFileWriter(File file) throws IOException {
		FileOutputStream fileOutputStream = new FileOutputStream(file);

		Writer writer = new OutputStreamWriter(fileOutputStream);

		return createUnsyncBufferedWriter(writer);
	}

	protected Writer createFileWriter(String fileName) throws IOException {
		File file = new File(fileName);

		return createFileWriter(file);
	}

	protected Writer createUnsyncBufferedWriter(Writer writer) {
		return new UnsyncBufferedWriter(writer, _WRITER_BUFFER_SIZE) {

			@Override
			public void flush() {
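				// Intentionally a no-op so that flush() calls issued after
				// each template render do not defeat the buffering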
			}

		};
	}

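	/**
	 * Starts a thread that renders the sample FreeMarker templates into the
	 * given pipe while teeing the output to sample.sql and writing the CSV
	 * files that describe the generated data.
	 */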
	protected void generateSQL(final CharPipe charPipe) {
		final Writer writer = createUnsyncBufferedWriter(charPipe.getWriter());

		Thread thread = new Thread() {

			@Override
			public void run() {
				try {
					_writerSampleSQL = new UnsyncTeeWriter(
						writer, createFileWriter(_outputDir + "/sample.sql"));

					createSample();

					_writerSampleSQL.close();

					charPipe.close();
				}
				catch (Exception e) {
					e.printStackTrace();
				}
			}

			protected void createSample() throws Exception {
				_writerBlogsCSV = getWriter("blogs.csv");
				_writerCompanyCSV = getWriter("company.csv");
				_writerDocumentLibraryCSV = getWriter("document_library.csv");
				_writerDynamicDataListsCSV = getWriter(
					"dynamic_data_lists.csv");
				_writerLayoutCSV = getWriter("layout.csv");
				_writerMessageBoardsCSV = getWriter("message_boards.csv");
				_writerRepositoryCSV = getWriter("repository.csv");
				_writerWikiCSV = getWriter("wiki.csv");

				Map<String, Object> context = getContext();

				processTemplate(_tplSample, context);

				_writerBlogsCSV.close();
				_writerCompanyCSV.close();
				_writerDocumentLibraryCSV.close();
				_writerDynamicDataListsCSV.close();
				_writerLayoutCSV.close();
				_writerMessageBoardsCSV.close();
				_writerRepositoryCSV.close();
				_writerWikiCSV.close();
			}

			protected Writer getWriter(String fileName) throws Exception {
				return createFileWriter(new File(_outputDir + "/" + fileName));
			}

		};

		thread.start();
	}

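	/**
	 * Builds the FreeMarker context shared by the sample templates, exposing
	 * the configured limits, the data factory, and the CSV writers.
	 */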
	protected Map<String, Object> getContext() {
		Map<String, Object> context = new HashMap<String, Object>();

		put(context, "companyId", _dataFactory.getCompanyId());
		put(context, "counter", _dataFactory.getCounter());
		put(context, "dataFactory", _dataFactory);
		put(context, "dateUtil", DateUtil_IW.getInstance());
		put(context, "defaultUserId", _dataFactory.getDefaultUserId());
		put(context, "maxDLFileEntrySize", _maxDLFileEntrySize);
		put(context, "maxBlogsEntryCommentCount", _maxBlogsEntryCommentCount);
		put(context, "maxBlogsEntryCount", _maxBlogsEntryCount);
		put(context, "maxDDLRecordCount", _maxDDLRecordCount);
		put(context, "maxDDLRecordSetCount", _maxDDLRecordSetCount);
		put(context, "maxDLFileEntryCount", _maxDLFileEntryCount);
		put(context, "maxDLFolderCount", _maxDLFolderCount);
		put(context, "maxDLFolderDepth", _maxDLFolderDepth);
		put(context, "maxGroupCount", _maxGroupCount);
		put(context, "maxJournalArticleCount", _maxJournalArticleCount);
		put(context, "maxMBCategoryCount", _maxMBCategoryCount);
		put(context, "maxMBMessageCount", _maxMBMessageCount);
		put(context, "maxMBThreadCount", _maxMBThreadCount);
		put(context, "maxUserCount", _maxUserCount);
		put(context, "maxUserToGroupCount", _maxUserToGroupCount);
		put(context, "maxWikiNodeCount", _maxWikiNodeCount);
		put(context, "maxWikiPageCommentCount", _maxWikiPageCommentCount);
		put(context, "maxWikiPageCount", _maxWikiPageCount);
		put(context, "portalUUIDUtil", SequentialUUID.getSequentialUUID());
		put(context, "sampleSQLBuilder", this);
		put(context, "stringUtil", StringUtil_IW.getInstance());
		put(
			context, "userScreenNameCounter",
			_dataFactory.getUserScreenNameCounter());
		put(context, "writerBlogsCSV", _writerBlogsCSV);
		put(context, "writerCompanyCSV", _writerCompanyCSV);
		put(context, "writerDocumentLibraryCSV", _writerDocumentLibraryCSV);
		put(context, "writerDynamicDataListsCSV", _writerDynamicDataListsCSV);
		put(context, "writerLayoutCSV", _writerLayoutCSV);
		put(context, "writerMessageBoardsCSV", _writerMessageBoardsCSV);
		put(context, "writerRepositoryCSV", _writerRepositoryCSV);
		put(context, "writerWikiCSV", _writerWikiCSV);

		return context;
	}

	protected File getInsertSQLFile(String tableName) {
		return new File(_tempDir, tableName + ".sql");
	}

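	/**
	 * Flushes the remaining batched inserts to their per table SQL files and
	 * then either concatenates those files into the merged sample SQL file
	 * (when sample.sql.output.merge is set) or leaves them in the output
	 * directory alongside the non-insert statements.
	 */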
	protected void mergeSQL() throws IOException {
		File outputFile = new File(_outputDir + "/sample-" + _dbType + ".sql");

		FileOutputStream fileOutputStream = null;
		FileChannel fileChannel = null;

		if (_outputMerge) {
			fileOutputStream = new FileOutputStream(outputFile);
			fileChannel = fileOutputStream.getChannel();
		}

		Set<Map.Entry<String, StringBundler>> insertSQLs =
			_insertSQLs.entrySet();

		for (Map.Entry<String, StringBundler> entry : insertSQLs) {
			String tableName = entry.getKey();

			String sql = _db.buildSQL(entry.getValue().toString());

			writeToInsertSQLFile(tableName, sql);

			Writer insertSQLWriter = _insertSQLWriters.remove(tableName);

			insertSQLWriter.write(";\n");

			insertSQLWriter.close();

			if (_outputMerge) {
				File insertSQLFile = getInsertSQLFile(tableName);

				FileInputStream insertSQLFileInputStream = new FileInputStream(
					insertSQLFile);

				FileChannel insertSQLFileChannel =
					insertSQLFileInputStream.getChannel();

				insertSQLFileChannel.transferTo(
					0, insertSQLFileChannel.size(), fileChannel);

				insertSQLFileChannel.close();

				insertSQLFile.delete();
			}
		}

		Writer writer = null;

		if (_outputMerge) {
			writer = new OutputStreamWriter(fileOutputStream);
		}
		else {
			writer = new FileWriter(getInsertSQLFile("others"));
		}

		for (String sql : _otherSQLs) {
			sql = _db.buildSQL(sql);

			writer.write(sql);
			writer.write(StringPool.NEW_LINE);
		}

		writer.close();

		File outputFolder = new File(_outputDir, "output");

		if (!_outputMerge && !_tempDir.renameTo(outputFolder)) {

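			// renameTo fails when the temp and output directories are on
			// different file systems, so fall back to copying the files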
			FileUtil.copyDirectory(_tempDir, outputFolder);
		}
	}

	protected void processTemplate(String name, Map<String, Object> context)
		throws Exception {

		FreeMarkerUtil.process(name, context, _writerSampleSQL);
	}

	protected void put(Map<String, Object> context, String key, Object value) {
		context.put(key, value);
	}

	protected void writeToInsertSQLFile(String tableName, String sql)
		throws IOException {

		Writer writer = _insertSQLWriters.get(tableName);

		if (writer == null) {
			File file = getInsertSQLFile(tableName);

			writer = createFileWriter(file);

			_insertSQLWriters.put(tableName, writer);
		}

		writer.write(sql);
	}

	private static final int _PIPE_BUFFER_SIZE = 16 * 1024 * 1024;

	private static final String _TPL_ROOT =
		"com/liferay/portal/tools/samplesqlbuilder/dependencies/";

	private static final int _WRITER_BUFFER_SIZE = 16 * 1024;

	private DataFactory _dataFactory;
	private DB _db;
	private String _dbType;
	private Map<String, StringBundler> _insertSQLs =
		new ConcurrentHashMap<String, StringBundler>();
	private Map<String, Writer> _insertSQLWriters =
		new ConcurrentHashMap<String, Writer>();
	private int _maxBlogsEntryCommentCount;
	private int _maxBlogsEntryCount;
	private int _maxDDLRecordCount;
	private int _maxDDLRecordSetCount;
	private int _maxDLFileEntryCount;
	private int _maxDLFileEntrySize;
	private int _maxDLFolderCount;
	private int _maxDLFolderDepth;
	private int _maxGroupCount;
	private int _maxJournalArticleCount;
	private int _maxJournalArticleSize;
	private int _maxMBCategoryCount;
	private int _maxMBMessageCount;
	private int _maxMBThreadCount;
	private int _maxUserCount;
	private int _maxUserToGroupCount;
	private int _maxWikiNodeCount;
	private int _maxWikiPageCommentCount;
	private int _maxWikiPageCount;
	private int _optimizeBufferSize;
	private List<String> _otherSQLs = new ArrayList<String>();
	private String _outputDir;
	private boolean _outputMerge;
	private File _tempDir;
	private String _tplBlogsEntry = _TPL_ROOT + "blogs_entry.ftl";
	private String _tplDDLRecord = _TPL_ROOT + "ddl_record.ftl";
	private String _tplDDLRecordSet = _TPL_ROOT + "ddl_record_set.ftl";
	private String _tplDLFileEntry = _TPL_ROOT + "dl_file_entry.ftl";
	private String _tplDLFolder = _TPL_ROOT + "dl_folder.ftl";
	private String _tplDLFolders = _TPL_ROOT + "dl_folders.ftl";
	private String _tplGroup = _TPL_ROOT + "group.ftl";
	private String _tplMBCategory = _TPL_ROOT + "mb_category.ftl";
	private String _tplMBMessage = _TPL_ROOT + "mb_message.ftl";
	private String _tplResourcePermission =
		_TPL_ROOT + "resource_permission.ftl";
	private String _tplSample = _TPL_ROOT + "sample.ftl";
	private String _tplUser = _TPL_ROOT + "user.ftl";
	private String _tplWikiPage = _TPL_ROOT + "wiki_page.ftl";
	private Writer _writerBlogsCSV;
	private Writer _writerCompanyCSV;
	private Writer _writerDocumentLibraryCSV;
	private Writer _writerDynamicDataListsCSV;
	private Writer _writerLayoutCSV;
	private Writer _writerMessageBoardsCSV;
	private Writer _writerRepositoryCSV;
	private Writer _writerSampleSQL;
	private Writer _writerWikiCSV;

}