OmniSciDB  72c90bc290
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2022 HEAVY.AI, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
23 
24 import org.apache.commons.cli.*;
25 import org.apache.thrift.TException;
26 import org.apache.thrift.protocol.TBinaryProtocol;
27 import org.apache.thrift.protocol.TJSONProtocol;
28 import org.apache.thrift.protocol.TProtocol;
29 import org.apache.thrift.transport.TSocket;
30 import org.apache.thrift.transport.TTransport;
31 import org.apache.thrift.transport.TTransportException;
32 import org.slf4j.Logger;
33 import org.slf4j.LoggerFactory;
34 
35 import java.io.BufferedReader;
36 import java.io.FileReader;
37 import java.io.IOException;
38 import java.math.BigDecimal;
39 import java.security.KeyStore;
40 import java.sql.*;
41 import java.time.*;
42 import java.util.ArrayList;
43 import java.util.List;
44 
45 import ai.heavy.thrift.server.*;
46 
/**
 * Converts JDBC epoch-millisecond timestamps to the epoch-second values the
 * import path stores; implemented with a lambda field in SQLImporter.
 */
interface DateTimeUtils {
  // Returns the given millisecond timestamp reduced to whole seconds.
  long getSecondsFromMilliseconds(long milliseconds);
}
50 
51 class MutuallyExlusiveOptionsException extends ParseException {
52  protected MutuallyExlusiveOptionsException(String message) {
53  super(message);
54  }
55 
56  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
57  StringBuffer sb = new StringBuffer(
58  "Mutually exclusive options used. " + errMsg + ". Options provided [");
59  for (String s : strings) {
60  sb.append(s);
61  sb.append(" ");
62  }
63  sb.setCharAt(sb.length() - 1, ']');
64  return new MutuallyExlusiveOptionsException(sb.toString());
65  }
66 }
67 
  // Accumulates every command-line option definition; consumed by parse().
  private Options options = new Options();
70 
71  void printVersion() {
72  System.out.println("SQLImporter Version 4.6.0");
73  }
74 
  // NOTE(review): the method signature (presumably `void printHelpMessage()`)
  // is not visible in this extract; the body below builds the usage banner
  // and prints it together with the registered options.
  StringBuffer sb = new StringBuffer("\nSQLImporter ");
  // Ready for PKI auth
  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
  // sb.append("([--ca-trust-store <ca trust store file name>]
  // --ca-trust-store-password
  // <trust store password> | --insecure)\n");
  sb.append(
          "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
  sb.append(
          "-su <other database user> -sp <other database user password> -ss <other database sql statement>\n");
  sb.append(
          "-t <HEAVYAI target table> -b <transfer buffer size> -f <table fragment size>\n");
  sb.append("[-tr] [-adtf] [-nprg] -i <init commands file>\n");
  sb.append("\nSQLImporter -h | --help\n\n");

  HelpFormatter formatter = new HelpFormatter();
  // Forces help to print out options in order they were added rather
  // than in alphabetical order
  formatter.setOptionComparator(null);
  int help_width = 100;
  formatter.printHelp(help_width, sb.toString(), "", options, "");
  }
100 
  // NOTE(review): the enclosing method's signature is not visible in this
  // extract; these statements register every command-line option on the
  // shared `options` collection.
  options.addOption("r", true, "Row Load Limit");

  // HEAVYAI authentication options
  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
  options.addOption(
          Option.builder("u").hasArg().desc("HEAVYAI User").longOpt("user").build());
  options.addOption(Option.builder("p")
                            .hasArg()
                            .desc("HEAVYAI Password")
                            .longOpt("passwd")
                            .build());
  // HEAVYAI transport options: binary/http/https are mutually exclusive.
  OptionGroup transport_grp = new OptionGroup();
  transport_grp.addOption(Option.builder()
                                  .desc("use binary transport to connect to HEAVYAI ")
                                  .longOpt("binary")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use http transport to connect to HEAVYAI ")
                                  .longOpt("http")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use https transport to connect to HEAVYAI ")
                                  .longOpt("https")
                                  .build());
  options.addOptionGroup(transport_grp);

  // HEAVYAI database server details
  options.addOption(Option.builder("s")
                            .hasArg()
                            .desc("HEAVYAI Server")
                            .longOpt("server")
                            .build());
  options.addOption(Option.builder("db")
                            .hasArg()
                            .desc("HEAVYAI Database")
                            .longOpt("database")
                            .build());
  options.addOption(
          Option.builder().hasArg().desc("HEAVYAI Port").longOpt("port").build());

  // HEAVYAI server authentication options
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store")
                            .longOpt("ca-trust-store")
                            .build());
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store password")
                            .longOpt("ca-trust-store-passwd")
                            .build());
  options.addOption(
          Option.builder()
                  .desc("Insecure TLS - do not validate server HEAVYAI server credentials")
                  .longOpt("insecure")
                  .build());

  // Other (source) database connection details; connection string,
  // credentials and the select statement are mandatory.
  options.addOption(Option.builder("d")
                            .hasArg()
                            .desc("JDBC driver class")
                            .longOpt("driver")
                            .build());
  options.addOption(Option.builder("c")
                            .hasArg()
                            .desc("JDBC Connection string")
                            .longOpt("jdbcConnect")
                            .required()
                            .build());
  options.addOption(Option.builder("su")
                            .hasArg()
                            .desc("Source User")
                            .longOpt("sourceUser")
                            .required()
                            .build());
  options.addOption(Option.builder("sp")
                            .hasArg()
                            .desc("Source Password")
                            .longOpt("sourcePasswd")
                            .required()
                            .build());
  options.addOption(Option.builder("ss")
                            .hasArg()
                            .desc("SQL Select statement")
                            .longOpt("sqlStmt")
                            .required()
                            .build());

  options.addOption(Option.builder("t")
                            .hasArg()
                            .desc("HEAVYAI Target Table")
                            .longOpt("targetTable")
                            .required()
                            .build());

  options.addOption(Option.builder("b")
                            .hasArg()
                            .desc("transfer buffer size")
                            .longOpt("bufferSize")
                            .build());
  options.addOption(Option.builder("f")
                            .hasArg()
                            .desc("table fragment size")
                            .longOpt("fragmentSize")
                            .build());

  options.addOption(Option.builder("tr")
                            .desc("Truncate table if it exists")
                            .longOpt("truncate")
                            .build());

  options.addOption(Option.builder("i")
                            .hasArg()
                            .desc("File containing init command for DB")
                            .longOpt("initializeFile")
                            .build());

  options.addOption(
          Option.builder("adtf")
                  .desc("Allow double to float conversion, note precision will be reduced")
                  .longOpt("AllowDoubleToFloat")
                  .build());

  options.addOption(
          Option.builder("ain")
                  .desc("Allow conversion from bigger integer types to smaller. Overflow might occur, "
                          + "use it only when casting is impossible")
                  .longOpt("AllowIntegerNarrowing")
                  .build());

  options.addOption(Option.builder("nlj")
                            .desc("Omit JDBC connection string from logs.")
                            .longOpt("no-log-jdbc-connection-string")
                            .build());
  }
238 
  /**
   * Marks the given option as required and returns it so the result can be
   * re-registered via options.addOption(...) in one expression.
   */
  private Option setOptionRequired(Option option) {
    option.setRequired(true);
    return option;
  }
243 
  /**
   * Parses the command line with an anonymous DefaultParser subclass that
   * first scans only for -h/--help (exiting early), then runs the full parse
   * and enforces cross-option rules: a user or client certificate must be
   * supplied, user implies password (and vice versa), and --insecure is only
   * valid together with --https. The `if (false)` section is retained for a
   * future PKI sign-on feature.
   *
   * @throws ParseException when required options are missing or conflict
   */
  public CommandLine parse(String[] args) throws ParseException {
    CommandLineParser clp = new DefaultParser() {
      public CommandLine parse(Options options, String[] strings) throws ParseException {
        Options helpOptions = new Options();
        helpOptions.addOption(
                Option.builder("h").desc("help message").longOpt("help").build());
        try {
          // NOTE(review): `cmd` is declared inside this try but read below;
          // one or more lines appear to be missing from this extract —
          // confirm against the original source before changing this logic.
          CommandLine cmd = super.parse(helpOptions, strings);
        } catch (UnrecognizedOptionException uE) {
          // Unknown options are ignored here; the full parse below reports them.
        }
        if (cmd.hasOption("help")) {
          // NOTE(review): a printHelpMessage() call appears to be missing
          // from this extract before the exit.
          exit(0);
        }
        if (cmd.hasOption("version")) {
          printVersion();
          exit(0);
        }
        cmd = super.parse(options, strings);
        if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
          throw new MissingArgumentException(
                  "Must supply either an HEAVYAI db user or a user certificate");
        }
        // if user supplied must have password and vice versa
        if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
          options.addOption(setOptionRequired(options.getOption("user")));
          options.addOption(setOptionRequired(options.getOption("passwd")));
          super.parse(options, strings);
        }

        // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
        // and
        // vice versa
        if (false) {
          if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
            options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
            options.addOption(
                    setOptionRequired(options.getOption("ca-trust-store-password")));
            super.parse(options, strings);
          }
          if (options.getOption("user").isRequired()
                  && options.getOption("client-key").isRequired()) {
            // NOTE(review): the declaration/assignment of `meo` appears to be
            // missing from this extract (the created exception is dropped).
            MutuallyExlusiveOptionsException.create(
                    "user/password can not be use with client-cert/client-key",
                    strings);
            throw meo;
          }

          if (cmd.hasOption("http")
                  || cmd.hasOption("binary")
                          && (cmd.hasOption("client-cert")
                                  || cmd.hasOption("client-key"))) {
            // NOTE(review): this exception is created but never thrown.
            MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                    "http|binary can not be use with ca-cert|client-cert|client-key",
                    strings);
          }
        }

        if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
          MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                  "insecure can only be use with https", strings);
          throw meo;
        }

        return cmd;
      }

      // Three-argument overload required by the parser interface; unused here.
      public CommandLine parse(Options options, String[] strings, boolean b)
              throws ParseException {
        return null;
      }
    };
    return clp.parse(options, args);
  }
319 }
320 
/**
 * Copies the result of a SQL query run against an arbitrary JDBC source
 * database into a HEAVY.AI (OmniSciDB) table over Thrift, creating or
 * verifying the target table as needed.
 */
public class SQLImporter {
  // Thrift session handle; null until a connection is established.
  protected String session = null;
  // Thrift RPC client for the HEAVY.AI server.
  protected Heavy.Client client = null;
  // Parsed command-line options; assigned in doWork().
  private CommandLine cmd = null;
  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
  // Converts JDBC epoch-milliseconds to the epoch-seconds HEAVY.AI stores.
  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
    return milliseconds / 1000;
  };
331 
332  public static void main(String[] args) {
333  SQLImporter sq = new SQLImporter();
334  sq.doWork(args);
335  }
336 
337  void doWork(String[] args) {
338  // create Options object
339 
340  SQLImporter_args s_args = new SQLImporter_args();
341 
342  try {
343  cmd = s_args.parse(args);
344  } catch (ParseException ex) {
345  LOGGER.error(ex.getLocalizedMessage());
346  s_args.printHelpMessage();
347  exit(0);
348  }
349  executeQuery();
350  }
351 
  /**
   * Runs the end-to-end import: connects to the source JDBC database,
   * optionally runs the --initializeFile script, executes the -ss query,
   * ensures the HEAVY.AI target table matches the result metadata, then
   * streams rows across in bufferSize batches via load_table_binary_columnar.
   * Resources are closed (and the HEAVY.AI session dropped) in finally.
   *
   * NOTE(review): this extract appears to be missing at least one line around
   * the setAutoCommit block below (the braces are unbalanced as shown) —
   * confirm against the original source before editing.
   */
  void executeQuery() {
    Connection conn = null;
    Statement stmt = null;

    long totalTime = 0;

    try {
      // Open a connection; -nlj suppresses the connection string in logs.
      if (cmd.hasOption("nlj")) {
        LOGGER.info("Connecting to source database.");
      } else {
        LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
      }
      conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
              cmd.getOptionValue("sourceUser"),
              cmd.getOptionValue("sourcePasswd"));
      vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
      long startTime = System.currentTimeMillis();

      // run init file script on target DB if present
      if (cmd.hasOption("initializeFile")) {
        run_init(conn);
      }

      try {
        conn.setAutoCommit(false);
      }
      } catch (SQLException se) {
        LOGGER.warn(
                "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
                + se.toString());
      }

      // Execute a query
      stmt = conn.createStatement();

      int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
      // set the jdbc fetch buffer size to reduce the amount of records being moved to
      // java from postgres
      stmt.setFetchSize(bufferSize);
      long timer;

      ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));

      // check if table already exists and is compatible in HEAVYAI with the query
      // metadata
      ResultSetMetaData md = rs.getMetaData();
      checkDBTable(conn, md);

      timer = System.currentTimeMillis();

      long resultCount = 0;
      int bufferCount = 0;
      long total = 0;

      // One Thrift columnar buffer per source column.
      List<TColumn> cols = new ArrayList(md.getColumnCount());
      for (int i = 1; i <= md.getColumnCount(); i++) {
        TColumn col = setupBinaryColumn(i, md, bufferSize);
        cols.add(col);
      }

      // read data from old DB
      while (rs.next()) {
        for (int i = 1; i <= md.getColumnCount(); i++) {
          setColValue(rs,
                  cols.get(i - 1),
                  md.getColumnType(i),
                  i,
                  md.getScale(i),
                  md.getColumnTypeName(i));
        }
        resultCount++;
        bufferCount++;
        if (bufferCount == bufferSize) {
          bufferCount = 0;
          // send the buffer to HEAVY.AI
          client.load_table_binary_columnar(
                  session, cmd.getOptionValue("targetTable"), cols, null);
          // recreate columnar store for use
          for (int i = 1; i <= md.getColumnCount(); i++) {
            resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
          }

          if (resultCount % 100000 == 0) {
            LOGGER.info("Imported " + resultCount + " records");
          }
        }
      }
      if (bufferCount > 0) {
        // send the LAST buffer to HEAVY.AI
        client.load_table_binary_columnar(
                session, cmd.getOptionValue("targetTable"), cols, null);
        bufferCount = 0;
      }

      LOGGER.info("result set count is " + resultCount + " read time is "
              + (System.currentTimeMillis() - timer) + "ms");

      // Clean-up environment
      rs.close();
      stmt.close();
      conn.close();

      totalTime = System.currentTimeMillis() - startTime;
    } catch (SQLException se) {
      LOGGER.error("SQLException - " + se.toString());
      se.printStackTrace();
    } catch (TDBException ex) {
      LOGGER.error("TDBException - " + ex.getError_msg());
      ex.printStackTrace();
    } catch (TException ex) {
      LOGGER.error("TException failed - " + ex.toString());
      ex.printStackTrace();
    } finally {
      // finally block used to close resources
      try {
        if (stmt != null) {
          stmt.close();
        }
      } catch (SQLException se2) {
      } // nothing we can do
      try {
        if (conn != null) {
          conn.close();
        }
      } catch (SQLException se) {
        LOGGER.error("SQlException in close - " + se.toString());
        se.printStackTrace();
      }
      try {
        if (session != null) {
          client.disconnect(session);
        }
      } catch (TDBException ex) {
        LOGGER.error("TDBException - in finalization " + ex.getError_msg());
        ex.printStackTrace();
      } catch (TException ex) {
        LOGGER.error("TException - in finalization" + ex.toString());
        ex.printStackTrace();
      }
    }
  }
495 
496  private void run_init(Connection conn) {
497  // attempt to open file
498  String line = "";
499  try {
500  BufferedReader reader =
501  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
502  Statement stmt = conn.createStatement();
503  while ((line = reader.readLine()) != null) {
504  if (line.isEmpty()) {
505  continue;
506  }
507  LOGGER.info("Running : " + line);
508  stmt.execute(line);
509  }
510  stmt.close();
511  reader.close();
512  } catch (IOException e) {
513  LOGGER.error("Exception occurred trying to read initialize file: "
514  + cmd.getOptionValue("initFile"));
515  exit(1);
516  } catch (SQLException e) {
517  LOGGER.error(
518  "Exception occurred trying to execute initialize file entry : " + line);
519  exit(1);
520  }
521  }
522 
523  private void help(Options options) {
524  // automatically generate the help statement
525  HelpFormatter formatter = new HelpFormatter();
526  formatter.setOptionComparator(null); // get options in the order they are created
527  formatter.printHelp("SQLImporter", options);
528  }
529 
  /**
   * Ensures the HEAVY.AI target table is ready: creates it when absent,
   * drops and recreates it when --truncate was given, otherwise verifies the
   * existing columns against the source result-set metadata.
   *
   * NOTE(review): one line appears to be missing from this extract at the
   * top of the method body — confirm against the original source.
   */
  private void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
          throws SQLException {
    String tName = cmd.getOptionValue("targetTable");

    if (tableExists(tName)) {
      // check if we want to truncate
      if (cmd.hasOption("truncate")) {
        // "truncate" is implemented as drop-and-recreate
        executeDBCommand("Drop table " + tName);
        createDBTable(otherdb_conn, md);
      } else {
        List<TColumnType> columnInfo = getColumnInfo(tName);
        verifyColumnSignaturesMatch(otherdb_conn, columnInfo, md);
      }
    } else {
      createDBTable(otherdb_conn, md);
    }
  }
548 
  /**
   * Verifies that every destination (HEAVY.AI) column is compatible with the
   * corresponding source result-set column: same count, same order and name
   * (case-insensitive), and a compatible type. Smaller source integer/float
   * types may land in wider destination columns (implemented with deliberate
   * switch fall-through), and the -ain / -adtf flags additionally permit
   * narrowing conversions. Any mismatch logs an error and exits the process.
   */
  private void verifyColumnSignaturesMatch(Connection otherdb_conn,
          List<TColumnType> dstColumns,
          ResultSetMetaData srcColumns) throws SQLException {
    if (srcColumns.getColumnCount() != dstColumns.size()) {
      LOGGER.error("Table sizes do not match: Destination " + dstColumns.size()
              + " versus Source " + srcColumns.getColumnCount());
      exit(1);
    }
    for (int i = 1; i <= dstColumns.size(); ++i) {
      // Column names must match positionally, ignoring case.
      if (!dstColumns.get(i - 1).getCol_name().equalsIgnoreCase(
                  srcColumns.getColumnName(i))) {
        LOGGER.error(
                "Destination table does not have matching column in same order for column number "
                + i + " destination column name is " + dstColumns.get(i - 1).col_name
                + " versus target column " + srcColumns.getColumnName(i));
        exit(1);
      }
      TDatumType dstType = dstColumns.get(i - 1).getCol_type().getType();
      int dstPrecision = dstColumns.get(i - 1).getCol_type().getPrecision();
      int dstScale = dstColumns.get(i - 1).getCol_type().getScale();
      int srcType = srcColumns.getColumnType(i);
      int srcPrecision = srcColumns.getPrecision(i);
      int srcScale = srcColumns.getScale(i);

      boolean match = false;
      switch (srcType) {
        case java.sql.Types.TINYINT:
          match |= dstType == TDatumType.TINYINT;
          // NOTE: it's okay to import smaller type to a bigger one,
          // so we just fall through and try to match the next type.
          // But the order of case statements is important here!
        case java.sql.Types.SMALLINT:
          match |= dstType == TDatumType.SMALLINT;
        case java.sql.Types.INTEGER:
          match |= dstType == TDatumType.INT;
        case java.sql.Types.BIGINT:
          match |= dstType == TDatumType.BIGINT;
          if (cmd.hasOption("AllowIntegerNarrowing")) {
            match |= dstType == TDatumType.TINYINT || dstType == TDatumType.SMALLINT
                    || dstType == TDatumType.INT;
          }
          break;
        case java.sql.Types.DECIMAL:
        case java.sql.Types.NUMERIC:
          // Fixed-point requires an exact precision/scale match.
          match = dstType == TDatumType.DECIMAL && dstPrecision == srcPrecision
                  && dstScale == srcScale;
          break;
        case java.sql.Types.FLOAT:
        case java.sql.Types.REAL:
          match |= dstType == TDatumType.FLOAT;
          // Fall through and try double
        case java.sql.Types.DOUBLE:
          match |= dstType == TDatumType.DOUBLE;
          if (cmd.hasOption("AllowDoubleToFloat")) {
            match |= dstType == TDatumType.FLOAT;
          }
          break;
        case java.sql.Types.TIME:
          match = dstType == TDatumType.TIME;
          break;
        case java.sql.Types.TIMESTAMP:
          match = dstType == TDatumType.TIMESTAMP;
          break;
        case java.sql.Types.DATE:
          match = dstType == TDatumType.DATE;
          break;
        case java.sql.Types.BOOLEAN:
        case java.sql.Types
                .BIT: // deal with postgres treating boolean as bit... this will bite me
          match = dstType == TDatumType.BOOL;
          break;
        case java.sql.Types.NVARCHAR:
        case java.sql.Types.VARCHAR:
        case java.sql.Types.NCHAR:
        case java.sql.Types.CHAR:
        case java.sql.Types.LONGVARCHAR:
        case java.sql.Types.LONGNVARCHAR:
          // Text columns may also feed geo columns (WKT strings).
          match = (dstType == TDatumType.STR || dstType == TDatumType.POINT
                  || dstType == TDatumType.POLYGON || dstType == TDatumType.MULTIPOLYGON
                  || dstType == TDatumType.LINESTRING
                  || dstType == TDatumType.MULTILINESTRING
                  || dstType == TDatumType.MULTIPOINT);
          break;
        case java.sql.Types.OTHER:
          // NOTE: I ignore subtypes (geography vs geometry vs none) here just because
          // it makes no difference for OmniSciDB at the moment
          Db_vendor_types.GisType gisType =
                  vendor_types.find_gis_type(otherdb_conn, srcColumns, i);
          if (gisType.srid != dstScale) {
            match = false;
            break;
          }
          switch (dstType) {
            case POINT:
              match = gisType.type.equalsIgnoreCase("POINT");
              break;
            case MULTIPOINT:
              match = gisType.type.equalsIgnoreCase("MULTIPOINT");
              break;
            case LINESTRING:
              match = gisType.type.equalsIgnoreCase("LINESTRING");
              break;
            case MULTILINESTRING:
              match = gisType.type.equalsIgnoreCase("MULTILINESTRING");
              break;
            case POLYGON:
              match = gisType.type.equalsIgnoreCase("POLYGON");
              break;
            case MULTIPOLYGON:
              match = gisType.type.equalsIgnoreCase("MULTIPOLYGON");
              break;
            default:
              LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                      + " not Supported");
              exit(1);
          }
          break;
        default:
          LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                  + " not Supported");
          exit(1);
      }
      if (!match) {
        LOGGER.error("Source and destination types for column "
                + srcColumns.getColumnName(i)
                + " do not match. Please make sure that type, precision and scale are exactly the same");
        exit(1);
      }
    }
  }
679 
680  private void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData) {
681  StringBuilder sb = new StringBuilder();
682  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
683 
684  // Now iterate the metadata
685  try {
686  for (int i = 1; i <= metaData.getColumnCount(); i++) {
687  if (i > 1) {
688  sb.append(",");
689  }
690  LOGGER.debug("Column name is " + metaData.getColumnName(i));
691  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
692  LOGGER.debug("Column type is " + metaData.getColumnType(i));
693 
694  sb.append(metaData.getColumnName(i)).append(" ");
695  int col_type = metaData.getColumnType(i);
696  if (col_type == java.sql.Types.OTHER) {
697  Db_vendor_types.GisType type =
698  vendor_types.find_gis_type(otherdb_conn, metaData, i);
699  sb.append(Db_vendor_types.gis_type_to_str(type));
700  } else {
701  sb.append(getColType(metaData.getColumnType(i),
702  metaData.getPrecision(i),
703  metaData.getScale(i)));
704  }
705  }
706  sb.append(")");
707 
708  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
709  sb.append(" with (fragment_size = ");
710  sb.append(cmd.getOptionValue("fragmentSize", "0"));
711  sb.append(")");
712  }
713 
714  } catch (SQLException ex) {
715  LOGGER.error("Error processing the metadata - " + ex.toString());
716  exit(1);
717  }
718 
719  executeDBCommand(sb.toString());
720  }
721 
722  private void createDBConnection() {
723  TTransport transport = null;
724  TProtocol protocol = new TBinaryProtocol(transport);
725  int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
726  String server = cmd.getOptionValue("server", "localhost");
727  try {
728  // Uses default certificate stores.
729  boolean load_trust_store = cmd.hasOption("https");
730  SockTransportProperties skT = null;
731  if (cmd.hasOption("https")) {
732  skT = SockTransportProperties.getEncryptedClientDefaultTrustStore(
733  !cmd.hasOption("insecure"));
734  transport = skT.openHttpsClientTransport(server, port);
735  transport.open();
736  protocol = new TJSONProtocol(transport);
737  } else if (cmd.hasOption("http")) {
738  skT = SockTransportProperties.getUnencryptedClient();
739  transport = skT.openHttpClientTransport(server, port);
740  protocol = new TJSONProtocol(transport);
741  } else {
742  skT = SockTransportProperties.getUnencryptedClient();
743  transport = skT.openClientTransport(server, port);
744  transport.open();
745  protocol = new TBinaryProtocol(transport);
746  }
747 
748  client = new Heavy.Client(protocol);
749  // This if will be useless until PKI signon
750  if (cmd.hasOption("user")) {
751  session = client.connect(cmd.getOptionValue("user", "admin"),
752  cmd.getOptionValue("passwd", "HyperInteractive"),
753  cmd.getOptionValue("database", "omnisci"));
754  }
755  LOGGER.debug("Connected session is " + session);
756 
757  } catch (TTransportException ex) {
758  LOGGER.error("Connection failed - " + ex.toString());
759  exit(1);
760  } catch (TDBException ex) {
761  LOGGER.error("Connection failed - " + ex.getError_msg());
762  exit(2);
763  } catch (TException ex) {
764  LOGGER.error("Connection failed - " + ex.toString());
765  exit(3);
766  } catch (Exception ex) {
767  LOGGER.error("General exception - " + ex.toString());
768  exit(4);
769  }
770  }
771 
772  private List<TColumnType> getColumnInfo(String tName) {
773  LOGGER.debug("Getting columns for " + tName);
774  List<TColumnType> row_descriptor = null;
775  try {
776  TTableDetails table_details = client.get_table_details(session, tName);
777  row_descriptor = table_details.row_desc;
778  } catch (TDBException ex) {
779  LOGGER.error("column check failed - " + ex.getError_msg());
780  exit(3);
781  } catch (TException ex) {
782  LOGGER.error("column check failed - " + ex.toString());
783  exit(3);
784  }
785  return row_descriptor;
786  }
787 
788  private boolean tableExists(String tName) {
789  LOGGER.debug("Check for table " + tName);
790  try {
791  List<String> recv_get_tables = client.get_tables(session);
792  for (String s : recv_get_tables) {
793  if (s.equals(tName)) {
794  return true;
795  }
796  }
797  } catch (TDBException ex) {
798  LOGGER.error("Table check failed - " + ex.getError_msg());
799  exit(3);
800  } catch (TException ex) {
801  LOGGER.error("Table check failed - " + ex.toString());
802  exit(3);
803  }
804  return false;
805  }
806 
807  private void executeDBCommand(String sql) {
808  LOGGER.info("Run Command - " + sql);
809 
810  try {
811  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
812  } catch (TDBException ex) {
813  LOGGER.error("SQL Execute failed - " + ex.getError_msg());
814  exit(1);
815  } catch (TException ex) {
816  LOGGER.error("SQL Execute failed - " + ex.toString());
817  exit(1);
818  }
819  }
820 
821  private String getColType(int cType, int precision, int scale) {
822  // Note - if cType is OTHER a earlier call will have been made
823  // to try and work out the db vendors specific type.
824  if (precision > 19) {
825  precision = 19;
826  }
827  if (scale > 19) {
828  scale = 18;
829  }
830  switch (cType) {
831  case java.sql.Types.TINYINT:
832  return ("TINYINT");
833  case java.sql.Types.SMALLINT:
834  return ("SMALLINT");
835  case java.sql.Types.INTEGER:
836  return ("INTEGER");
837  case java.sql.Types.BIGINT:
838  return ("BIGINT");
839  case java.sql.Types.FLOAT:
840  return ("FLOAT");
841  case java.sql.Types.DECIMAL:
842  return ("DECIMAL(" + precision + "," + scale + ")");
843  case java.sql.Types.DOUBLE:
844  return ("DOUBLE");
845  case java.sql.Types.REAL:
846  return ("REAL");
847  case java.sql.Types.NUMERIC:
848  return ("NUMERIC(" + precision + "," + scale + ")");
849  case java.sql.Types.TIME:
850  return ("TIME");
851  case java.sql.Types.TIMESTAMP:
852  return ("TIMESTAMP");
853  case java.sql.Types.DATE:
854  return ("DATE");
855  case java.sql.Types.BOOLEAN:
856  case java.sql.Types
857  .BIT: // deal with postgress treating boolean as bit... this will bite me
858  return ("BOOLEAN");
859  case java.sql.Types.NVARCHAR:
860  case java.sql.Types.VARCHAR:
861  case java.sql.Types.NCHAR:
862  case java.sql.Types.CHAR:
863  case java.sql.Types.LONGVARCHAR:
864  case java.sql.Types.LONGNVARCHAR:
865  return ("TEXT ENCODING DICT");
866  default:
867  throw new AssertionError("Column type " + cType + " not Supported");
868  }
869  }
870 
871  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
872  throws SQLException {
873  TColumn col = new TColumn();
874 
875  col.nulls = new ArrayList<Boolean>(bufferSize);
876 
877  col.data = new TColumnData();
878 
879  switch (md.getColumnType(i)) {
880  case java.sql.Types.TINYINT:
881  case java.sql.Types.SMALLINT:
882  case java.sql.Types.INTEGER:
883  case java.sql.Types.BIGINT:
884  case java.sql.Types.TIME:
885  case java.sql.Types.TIMESTAMP:
886  case java.sql.Types
887  .BIT: // deal with postgress treating boolean as bit... this will bite me
888  case java.sql.Types.BOOLEAN:
889  case java.sql.Types.DATE:
890  case java.sql.Types.DECIMAL:
891  case java.sql.Types.NUMERIC:
892  col.data.int_col = new ArrayList<Long>(bufferSize);
893  break;
894 
895  case java.sql.Types.FLOAT:
896  case java.sql.Types.DOUBLE:
897  case java.sql.Types.REAL:
898  col.data.real_col = new ArrayList<Double>(bufferSize);
899  break;
900 
901  case java.sql.Types.NVARCHAR:
902  case java.sql.Types.VARCHAR:
903  case java.sql.Types.NCHAR:
904  case java.sql.Types.CHAR:
905  case java.sql.Types.LONGVARCHAR:
906  case java.sql.Types.LONGNVARCHAR:
907  case java.sql.Types.OTHER:
908  col.data.str_col = new ArrayList<String>(bufferSize);
909  break;
910 
911  default:
912  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
913  }
914  return col;
915  }
916 
917  private void setColValue(ResultSet rs,
918  TColumn col,
919  int columnType,
920  int colNum,
921  int scale,
922  String colTypeName) throws SQLException {
923  switch (columnType) {
924  case java.sql.Types
925  .BIT: // deal with postgress treating boolean as bit... this will bite me
926  case java.sql.Types.BOOLEAN:
927  Boolean b = rs.getBoolean(colNum);
928  if (rs.wasNull()) {
929  col.nulls.add(Boolean.TRUE);
930  col.data.int_col.add(0L);
931  } else {
932  col.nulls.add(Boolean.FALSE);
933  col.data.int_col.add(b ? 1L : 0L);
934  }
935  break;
936 
937  case java.sql.Types.DECIMAL:
938  case java.sql.Types.NUMERIC:
939  BigDecimal bd = rs.getBigDecimal(colNum);
940  if (rs.wasNull()) {
941  col.nulls.add(Boolean.TRUE);
942  col.data.int_col.add(0L);
943  } else {
944  col.nulls.add(Boolean.FALSE);
945  col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
946  }
947  break;
948 
949  case java.sql.Types.TINYINT:
950  case java.sql.Types.SMALLINT:
951  case java.sql.Types.INTEGER:
952  case java.sql.Types.BIGINT:
953  Long l = rs.getLong(colNum);
954  if (rs.wasNull()) {
955  col.nulls.add(Boolean.TRUE);
956  col.data.int_col.add(new Long(0));
957  } else {
958  col.nulls.add(Boolean.FALSE);
959  col.data.int_col.add(l);
960  }
961  break;
962 
963  case java.sql.Types.TIME:
964  Time t = rs.getTime(colNum);
965  if (rs.wasNull()) {
966  col.nulls.add(Boolean.TRUE);
967  col.data.int_col.add(0L);
968 
969  } else {
970  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
971  col.nulls.add(Boolean.FALSE);
972  }
973 
974  break;
975  case java.sql.Types.TIMESTAMP:
976  Timestamp ts = rs.getTimestamp(colNum);
977  if (rs.wasNull()) {
978  col.nulls.add(Boolean.TRUE);
979  col.data.int_col.add(0L);
980 
981  } else {
982  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
983  col.nulls.add(Boolean.FALSE);
984  }
985 
986  break;
987  case java.sql.Types.DATE:
988  Date d = rs.getDate(colNum);
989  if (rs.wasNull()) {
990  col.nulls.add(Boolean.TRUE);
991  col.data.int_col.add(0L);
992 
993  } else {
994  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
995  col.nulls.add(Boolean.FALSE);
996  }
997  break;
998  case java.sql.Types.FLOAT:
999  case java.sql.Types.DOUBLE:
1000  case java.sql.Types.REAL:
1001  Double db = rs.getDouble(colNum);
1002  if (rs.wasNull()) {
1003  col.nulls.add(Boolean.TRUE);
1004  col.data.real_col.add(new Double(0));
1005 
1006  } else {
1007  col.nulls.add(Boolean.FALSE);
1008  col.data.real_col.add(db);
1009  }
1010  break;
1011 
1012  case java.sql.Types.NVARCHAR:
1013  case java.sql.Types.VARCHAR:
1014  case java.sql.Types.NCHAR:
1015  case java.sql.Types.CHAR:
1016  case java.sql.Types.LONGVARCHAR:
1017  case java.sql.Types.LONGNVARCHAR:
1018  String strVal = rs.getString(colNum);
1019  if (rs.wasNull()) {
1020  col.nulls.add(Boolean.TRUE);
1021  col.data.str_col.add("");
1022 
1023  } else {
1024  col.data.str_col.add(strVal);
1025  col.nulls.add(Boolean.FALSE);
1026  }
1027  break;
1028  case java.sql.Types.OTHER:
1029  Object objVal = rs.getObject(colNum);
1030  if (rs.wasNull()) {
1031  col.nulls.add(Boolean.TRUE);
1032  col.data.str_col.add("");
1033  } else {
1034  col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
1035  col.nulls.add(Boolean.FALSE);
1036  }
1037  break;
1038  default:
1039  throw new AssertionError("Column type " + columnType + " not Supported");
1040  }
1041  }
1042 
1043  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
1044  throws SQLException {
1045  col.nulls.clear();
1046 
1047  switch (md.getColumnType(i)) {
1048  case java.sql.Types.TINYINT:
1049  case java.sql.Types.SMALLINT:
1050  case java.sql.Types.INTEGER:
1051  case java.sql.Types.BIGINT:
1052  case java.sql.Types.TIME:
1053  case java.sql.Types.TIMESTAMP:
1054  case java.sql.Types
1055  .BIT: // deal with postgress treating boolean as bit... this will bite me
1056  case java.sql.Types.BOOLEAN:
1057  case java.sql.Types.DATE:
1058  case java.sql.Types.DECIMAL:
1059  case java.sql.Types.NUMERIC:
1060  col.data.int_col.clear();
1061  break;
1062 
1063  case java.sql.Types.FLOAT:
1064  case java.sql.Types.DOUBLE:
1065  case java.sql.Types.REAL:
1066  col.data.real_col.clear();
1067  break;
1068 
1069  case java.sql.Types.NVARCHAR:
1070  case java.sql.Types.VARCHAR:
1071  case java.sql.Types.NCHAR:
1072  case java.sql.Types.CHAR:
1073  case java.sql.Types.LONGVARCHAR:
1074  case java.sql.Types.LONGNVARCHAR:
1075  case java.sql.Types.OTHER:
1076  col.data.str_col.clear();
1077  break;
1078  default:
1079  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
1080  }
1081  }
1082 }
void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData)
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
size_t append(FILE *f, const size_t size, const int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:158
String getColType(int cType, int precision, int scale)
void executeDBCommand(String sql)
void help(Options options)
static void main(String[] args)
void doWork(String[] args)
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
void verifyColumnSignaturesMatch(Connection otherdb_conn, List< TColumnType > dstColumns, ResultSetMetaData srcColumns)
tuple line
Definition: parse_ast.py:10
static final Logger LOGGER
tuple conn
Definition: report.py:41
boolean tableExists(String tName)
long getSecondsFromMilliseconds(long milliseconds)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)