package com.liferay.portal.tools.samplesqlbuilder;

import com.liferay.portal.dao.db.MySQLDB;
import com.liferay.portal.freemarker.FreeMarkerUtil;
import com.liferay.portal.kernel.dao.db.DB;
import com.liferay.portal.kernel.dao.db.DBFactoryUtil;
import com.liferay.portal.kernel.io.CharPipe;
import com.liferay.portal.kernel.io.OutputStreamWriter;
import com.liferay.portal.kernel.io.unsync.UnsyncBufferedReader;
import com.liferay.portal.kernel.io.unsync.UnsyncBufferedWriter;
import com.liferay.portal.kernel.io.unsync.UnsyncTeeWriter;
import com.liferay.portal.kernel.util.FileUtil;
import com.liferay.portal.kernel.util.GetterUtil;
import com.liferay.portal.kernel.util.SortedProperties;
import com.liferay.portal.kernel.util.StringBundler;
import com.liferay.portal.kernel.util.StringPool;
import com.liferay.portal.kernel.util.StringUtil;
import com.liferay.portal.util.InitUtil;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;

import java.nio.channels.FileChannel;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

public class SampleSQLBuilder {

	public static void main(String[] args) {
		try {
			new SampleSQLBuilder(args);
		}
		catch (Exception e) {
			e.printStackTrace();
		}
	}

	public SampleSQLBuilder(String[] args) throws Exception {
		InitUtil.initWithSpring();

		Properties properties = getProperties(args);

		_dbType = properties.getProperty("sample.sql.db.type");

		_csvFileNames = StringUtil.split(
			properties.getProperty("sample.sql.output.csv.file.names"));
		_optimizeBufferSize = GetterUtil.getInteger(
			properties.getProperty("sample.sql.optimize.buffer.size"));
		_outputDir = properties.getProperty("sample.sql.output.dir");
		_script = properties.getProperty("sample.sql.script");

		_dataFactory = new DataFactory(properties);

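		// Generate the sample SQL on a separate thread and hand back a
		// reader so the output can be compressed as it is produced
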
		Reader reader = generateSQL();

		File tempDir = new File(_outputDir, "temp");

		tempDir.mkdirs();

		try {

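			// Compress the generated SQL into one file per table under
			// the temporary directory
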
			compressSQL(reader, tempDir);

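			// Either merge the per table SQL files into a single script
			// or move them to the output directory as they are
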
			boolean outputMerge = GetterUtil.getBoolean(
				properties.getProperty("sample.sql.output.merge"));

			if (outputMerge) {
				File sqlFile = new File(
					_outputDir, "sample-" + _dbType + ".sql");

				FileUtil.delete(sqlFile);

				mergeSQL(tempDir, sqlFile);
			}
			else {
				File outputDir = new File(_outputDir, "output");

				FileUtil.deltree(outputDir);

				if (!tempDir.renameTo(outputDir)) {

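					// File.renameTo may fail, for example when the rename
					// crosses file systems, so fall back to copying
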
					FileUtil.copyDirectory(tempDir, outputDir);
				}
			}
		}
		finally {
			FileUtil.deltree(tempDir);
		}

		StringBundler sb = new StringBundler();

		for (String key : properties.stringPropertyNames()) {
			if (!key.startsWith("sample.sql")) {
				continue;
			}

			String value = properties.getProperty(key);

			sb.append(key);
			sb.append(StringPool.EQUAL);
			sb.append(value);
			sb.append(StringPool.NEW_LINE);
		}

		FileUtil.write(
			new File(_outputDir, "benchmarks-actual.properties"),
			sb.toString());
	}

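	// Appends one insert statement's values to the buffer for its table and
	// flushes the buffer to that table's SQL file as a single multi-row
	// insert once it grows past the configured optimize buffer size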
	protected void compressSQL(
			DB db, File directory, Map<String, Writer> insertSQLWriters,
			Map<String, StringBundler> sqls, String insertSQL)
		throws IOException {

		String tableName = insertSQL.substring(0, insertSQL.indexOf(' '));

		int index = insertSQL.indexOf(" values ") + 8;

		StringBundler sb = sqls.get(tableName);

		if ((sb == null) || (sb.index() == 0)) {
			sb = new StringBundler();

			sqls.put(tableName, sb);

			sb.append("insert into ");
			sb.append(insertSQL.substring(0, index));
			sb.append("\n");
		}
		else {
			sb.append(",\n");
		}

		String values = insertSQL.substring(index, insertSQL.length() - 1);

		sb.append(values);

		if (sb.index() >= _optimizeBufferSize) {
			sb.append(";\n");

			insertSQL = db.buildSQL(sb.toString());

			sb.setIndex(0);

			writeToInsertSQLFile(
				directory, tableName, insertSQLWriters, insertSQL);
		}
	}

	protected void compressSQL(Reader reader, File dir) throws IOException {
		DB db = DBFactoryUtil.getDB(_dbType);

		if (db instanceof MySQLDB) {
			db = new SampleMySQLDB();
		}

		Map<String, Writer> insertSQLWriters = new HashMap<String, Writer>();
		Map<String, StringBundler> insertSQLs =
			new HashMap<String, StringBundler>();
		List<String> miscSQLs = new ArrayList<String>();

		UnsyncBufferedReader unsyncBufferedReader = new UnsyncBufferedReader(
			reader);

		String s = null;

		while ((s = unsyncBufferedReader.readLine()) != null) {
			s = s.trim();

			if (s.length() > 0) {
				if (s.startsWith("insert into ")) {
					compressSQL(
						db, dir, insertSQLWriters, insertSQLs, s.substring(12));
				}
				else {
					miscSQLs.add(s);
				}
			}
		}

		unsyncBufferedReader.close();

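		// Flush any partially filled buffers and terminate each table's
		// multi-row insert statement
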
		for (Map.Entry<String, StringBundler> entry : insertSQLs.entrySet()) {
			String tableName = entry.getKey();
			StringBundler sb = entry.getValue();

			if (sb.index() == 0) {
				continue;
			}

			String insertSQL = db.buildSQL(sb.toString());

			writeToInsertSQLFile(dir, tableName, insertSQLWriters, insertSQL);

			Writer insertSQLWriter = insertSQLWriters.remove(tableName);

			insertSQLWriter.write(";\n");

			insertSQLWriter.close();
		}

		Writer miscSQLWriter = new FileWriter(new File(dir, "misc.sql"));

		for (String miscSQL : miscSQLs) {
			miscSQL = db.buildSQL(miscSQL);

			miscSQLWriter.write(miscSQL);
			miscSQLWriter.write(StringPool.NEW_LINE);
		}

		miscSQLWriter.close();
	}

	protected Writer createFileWriter(File file) throws IOException {
		FileOutputStream fileOutputStream = new FileOutputStream(file);

		Writer writer = new OutputStreamWriter(fileOutputStream);

		return createUnsyncBufferedWriter(writer);
	}

	protected Writer createUnsyncBufferedWriter(Writer writer) {
		return new UnsyncBufferedWriter(writer, _WRITER_BUFFER_SIZE) {

			@Override
			public void flush() {

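				// Keep flush a no-op so that FreeMarker cannot force the
				// buffer through on every flush call; buffered content is
				// still written out when the writer is closed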

			}

		};
	}

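	// Runs the FreeMarker script on a separate thread, teeing the generated
	// SQL to sample.sql and to a pipe whose reader feeds the compression step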
	protected Reader generateSQL() {
		final CharPipe charPipe = new CharPipe(_PIPE_BUFFER_SIZE);

		Thread thread = new Thread() {

			@Override
			public void run() {
				try {
					Writer sampleSQLWriter = new UnsyncTeeWriter(
						createUnsyncBufferedWriter(charPipe.getWriter()),
						createFileWriter(new File(_outputDir, "sample.sql")));

					Map<String, Object> context = getContext();

					FreeMarkerUtil.process(_script, context, sampleSQLWriter);

					for (String csvFileName : _csvFileNames) {
						Writer csvWriter = (Writer)context.get(
							csvFileName + "CSVWriter");

						csvWriter.close();
					}

					sampleSQLWriter.close();

					charPipe.close();
				}
				catch (Exception e) {
					e.printStackTrace();
				}
			}

		};

		thread.start();

		return charPipe.getReader();
	}

	protected Map<String, Object> getContext() throws Exception {
		Map<String, Object> context = new HashMap<String, Object>();

		context.put("dataFactory", _dataFactory);

		for (String csvFileName : _csvFileNames) {
			Writer csvWriter = createFileWriter(
				new File(_outputDir, csvFileName + ".csv"));

			context.put(csvFileName + "CSVWriter", csvWriter);
		}

		return context;
	}

	protected Properties getProperties(String[] args) throws Exception {
		Reader reader = null;

		try {
			Properties properties = new SortedProperties();

			reader = new FileReader(args[0]);

			properties.load(reader);

			return properties;
		}
		finally {
			if (reader != null) {
				try {
					reader.close();
				}
				catch (IOException ioe) {
					ioe.printStackTrace();
				}
			}
		}
	}

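	// Concatenates the per table SQL files into a single script, writing
	// misc.sql last so that its statements run after all of the inserts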
	protected void mergeSQL(File inputDir, File outputSQLFile)
		throws IOException {

		FileOutputStream outputSQLFileOutputStream = new FileOutputStream(
			outputSQLFile);

		FileChannel outputFileChannel = outputSQLFileOutputStream.getChannel();

		File miscSQLFile = null;

		for (File inputFile : inputDir.listFiles()) {
			String inputFileName = inputFile.getName();

			if (inputFileName.equals("misc.sql")) {
				miscSQLFile = inputFile;

				continue;
			}

			mergeSQL(inputFile, outputFileChannel);
		}

		if (miscSQLFile != null) {
			mergeSQL(miscSQLFile, outputFileChannel);
		}

		outputFileChannel.close();
	}

	protected void mergeSQL(File inputFile, FileChannel outputFileChannel)
		throws IOException {

		FileInputStream inputFileInputStream = new FileInputStream(inputFile);

		FileChannel inputFileChannel = inputFileInputStream.getChannel();

		inputFileChannel.transferTo(
			0, inputFileChannel.size(), outputFileChannel);

		inputFileChannel.close();

		inputFile.delete();
	}

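	// Writes an insert statement to its table's SQL file, lazily creating
	// the file and caching the writer on first use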
	protected void writeToInsertSQLFile(
			File dir, String tableName, Map<String, Writer> insertSQLWriters,
			String insertSQL)
		throws IOException {

		Writer insertSQLWriter = insertSQLWriters.get(tableName);

		if (insertSQLWriter == null) {
			File file = new File(dir, tableName + ".sql");

			insertSQLWriter = createFileWriter(file);

			insertSQLWriters.put(tableName, insertSQLWriter);
		}

		insertSQLWriter.write(insertSQL);
	}

	private static final int _PIPE_BUFFER_SIZE = 16 * 1024 * 1024;

	private static final int _WRITER_BUFFER_SIZE = 16 * 1024;

	private String[] _csvFileNames;
	private DataFactory _dataFactory;
	private String _dbType;
	private int _optimizeBufferSize;
	private String _outputDir;
	private String _script;

}