OmniSciDB  f17484ade4
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2022 HEAVY.AI, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
23 
24 import org.apache.commons.cli.*;
25 import org.apache.thrift.TException;
26 import org.apache.thrift.protocol.TBinaryProtocol;
27 import org.apache.thrift.protocol.TJSONProtocol;
28 import org.apache.thrift.protocol.TProtocol;
29 import org.apache.thrift.transport.TSocket;
30 import org.apache.thrift.transport.TTransport;
31 import org.apache.thrift.transport.TTransportException;
32 import org.slf4j.Logger;
33 import org.slf4j.LoggerFactory;
34 
35 import java.io.BufferedReader;
36 import java.io.FileReader;
37 import java.io.IOException;
38 import java.math.BigDecimal;
39 import java.security.KeyStore;
40 import java.sql.*;
41 import java.time.*;
42 import java.util.ArrayList;
43 import java.util.List;
44 
45 import ai.heavy.thrift.server.*;
46 
/**
 * Converts a millisecond timestamp to whole seconds; used to adapt JDBC
 * time/timestamp/date values (milliseconds since epoch) for columnar load.
 * Annotated so the compiler enforces the single-abstract-method contract the
 * lambda implementation in SQLImporter relies on.
 */
@FunctionalInterface
interface DateTimeUtils {
  long getSecondsFromMilliseconds(long milliseconds);
}
50 
51 class MutuallyExlusiveOptionsException extends ParseException {
52  protected MutuallyExlusiveOptionsException(String message) {
53  super(message);
54  }
55 
56  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
57  StringBuffer sb = new StringBuffer(
58  "Mutually exclusive options used. " + errMsg + ". Options provided [");
59  for (String s : strings) {
60  sb.append(s);
61  sb.append(" ");
62  }
63  sb.setCharAt(sb.length() - 1, ']');
64  return new MutuallyExlusiveOptionsException(sb.toString());
65  }
66 }
67 
69  private Options options = new Options();
70 
71  void printVersion() {
72  System.out.println("SQLImporter Version 4.6.0");
73  }
74 
76  StringBuffer sb = new StringBuffer("\nSQLImporter ");
77  // Ready for PKI auth
78  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
79  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
80  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
81  // sb.append("([--ca-trust-store <ca trust store file name>]
82  // --ca-trust-store-password
83  // <trust store password> | --insecure)\n");
84  sb.append(
85  "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
86  sb.append(
87  "-su <other database user> -sp <other database user password> -ss <other database sql statement>\n");
88  sb.append(
89  "-t <HEAVYAI target table> -b <transfer buffer size> -f <table fragment size>\n");
90  sb.append("[-tr] [-adtf] [-nprg] -i <init commands file>\n");
91  sb.append("\nSQLImporter -h | --help\n\n");
92 
93  HelpFormatter formatter = new HelpFormatter();
94  // Forces help to print out options in order they were added rather
95  // than in alphabetical order
96  formatter.setOptionComparator(null);
97  int help_width = 100;
98  formatter.printHelp(help_width, sb.toString(), "", options, "");
99  }
100 
102  options.addOption("r", true, "Row Load Limit");
103 
104  // HEAVYAI authentication options
105  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
106  options.addOption(
107  Option.builder("u").hasArg().desc("HEAVYAI User").longOpt("user").build());
108  options.addOption(Option.builder("p")
109  .hasArg()
110  .desc("HEAVYAI Password")
111  .longOpt("passwd")
112  .build());
113  // HEAVYAI transport options
114  OptionGroup transport_grp = new OptionGroup();
115  transport_grp.addOption(Option.builder()
116  .desc("use binary transport to connect to HEAVYAI ")
117  .longOpt("binary")
118  .build());
119  transport_grp.addOption(Option.builder()
120  .desc("use http transport to connect to HEAVYAI ")
121  .longOpt("http")
122  .build());
123  transport_grp.addOption(Option.builder()
124  .desc("use https transport to connect to HEAVYAI ")
125  .longOpt("https")
126  .build());
127  options.addOptionGroup(transport_grp);
128 
129  // HEAVYAI database server details
130  options.addOption(Option.builder("s")
131  .hasArg()
132  .desc("HEAVYAI Server")
133  .longOpt("server")
134  .build());
135  options.addOption(Option.builder("db")
136  .hasArg()
137  .desc("HEAVYAI Database")
138  .longOpt("database")
139  .build());
140  options.addOption(
141  Option.builder().hasArg().desc("HEAVYAI Port").longOpt("port").build());
142 
143  // HEAVYAI server authentication options
144  options.addOption(Option.builder()
145  .hasArg()
146  .desc("CA certificate trust store")
147  .longOpt("ca-trust-store")
148  .build());
149  options.addOption(Option.builder()
150  .hasArg()
151  .desc("CA certificate trust store password")
152  .longOpt("ca-trust-store-passwd")
153  .build());
154  options.addOption(
155  Option.builder()
156  .desc("Insecure TLS - do not validate server HEAVYAI server credentials")
157  .longOpt("insecure")
158  .build());
159 
160  // Other database connection details
161  options.addOption(Option.builder("d")
162  .hasArg()
163  .desc("JDBC driver class")
164  .longOpt("driver")
165  .build());
166  options.addOption(Option.builder("c")
167  .hasArg()
168  .desc("JDBC Connection string")
169  .longOpt("jdbcConnect")
170  .required()
171  .build());
172  options.addOption(Option.builder("su")
173  .hasArg()
174  .desc("Source User")
175  .longOpt("sourceUser")
176  .required()
177  .build());
178  options.addOption(Option.builder("sp")
179  .hasArg()
180  .desc("Source Password")
181  .longOpt("sourcePasswd")
182  .required()
183  .build());
184  options.addOption(Option.builder("ss")
185  .hasArg()
186  .desc("SQL Select statement")
187  .longOpt("sqlStmt")
188  .required()
189  .build());
190 
191  options.addOption(Option.builder("t")
192  .hasArg()
193  .desc("HEAVYAI Target Table")
194  .longOpt("targetTable")
195  .required()
196  .build());
197 
198  options.addOption(Option.builder("b")
199  .hasArg()
200  .desc("transfer buffer size")
201  .longOpt("bufferSize")
202  .build());
203  options.addOption(Option.builder("f")
204  .hasArg()
205  .desc("table fragment size")
206  .longOpt("fragmentSize")
207  .build());
208 
209  options.addOption(Option.builder("tr")
210  .desc("Truncate table if it exists")
211  .longOpt("truncate")
212  .build());
213 
214  options.addOption(Option.builder("i")
215  .hasArg()
216  .desc("File containing init command for DB")
217  .longOpt("initializeFile")
218  .build());
219 
220  options.addOption(
221  Option.builder("adtf")
222  .desc("Allow double to float conversion, note precision will be reduced")
223  .longOpt("AllowDoubleToFloat")
224  .build());
225 
226  options.addOption(
227  Option.builder("ain")
228  .desc("Allow conversion from bigger integer types to smaller. Overflow might occur, "
229  + "use it only when casting is impossible")
230  .longOpt("AllowIntegerNarrowing")
231  .build());
232  }
233 
234  private Option setOptionRequired(Option option) {
235  option.setRequired(true);
236  return option;
237  }
238 
 /**
  * Parses the command line using an anonymous DefaultParser subclass that
  * special-cases -h/--help and --version before enforcing required options,
  * then cross-validates mutually exclusive option combinations.
  *
  * NOTE(review): this listing is missing source lines (original lines 250 and
  * 281 were elided by the doc renderer), so `cmd` appears declared only inside
  * the first try block and one MutuallyExlusiveOptionsException assignment is
  * not visible here — do not treat the code below as compilable as-shown.
  *
  * @param args raw command-line arguments
  * @return the parsed CommandLine
  * @throws ParseException on missing/conflicting options
  */
 public CommandLine parse(String[] args) throws ParseException {
 CommandLineParser clp = new DefaultParser() {
 public CommandLine parse(Options options, String[] strings) throws ParseException {
 Options helpOptions = new Options();
 helpOptions.addOption(
 Option.builder("h").desc("help message").longOpt("help").build());
 try {
 // Pre-parse against a help-only option set so -h/--help works even
 // when required options are absent.
 CommandLine cmd = super.parse(helpOptions, strings);
 } catch (UnrecognizedOptionException uE) {
 // Deliberately ignored: unknown options are validated by the full parse below.
 }
 if (cmd.hasOption("help")) {
 exit(0);
 }
 if (cmd.hasOption("version")) {
 printVersion();
 exit(0);
 }
 // Full parse against the real option set.
 cmd = super.parse(options, strings);
 if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
 throw new MissingArgumentException(
 "Must supply either an HEAVYAI db user or a user certificate");
 }
 // if user supplied must have password and vice versa
 if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
 options.addOption(setOptionRequired(options.getOption("user")));
 options.addOption(setOptionRequired(options.getOption("passwd")));
 super.parse(options, strings);
 }

 // FUTURE USE FOR USER Auth: if client-cert supplied must have client-key
 // and vice versa. Intentionally disabled (if (false)) until PKI sign-on lands.
 if (false) {
 if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
 options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
 options.addOption(
 setOptionRequired(options.getOption("ca-trust-store-password")));
 super.parse(options, strings);
 }
 if (options.getOption("user").isRequired()
 && options.getOption("client-key").isRequired() {
 MutuallyExlusiveOptionsException.create(
 "user/password can not be use with client-cert/client-key",
 strings);
 throw meo;
 }

 if (cmd.hasOption("http")
 || cmd.hasOption("binary")
 && (cmd.hasOption("client-cert")
 || cmd.hasOption("client-key"))) {
 MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
 "http|binary can not be use with ca-cert|client-cert|client-key",
 strings);
 }
 }

 // --insecure only makes sense together with --https.
 if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
 MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
 "insecure can only be use with https", strings);
 throw meo;
 }

 return cmd;
 }

 // Unused three-argument overload; never called by this tool.
 public CommandLine parse(Options options, String[] strings, boolean b)
 throws ParseException {
 return null;
 }
 };
 return clp.parse(options, args);
 }
314 }
315 
316 public class SQLImporter {
317  protected String session = null;
318  protected Heavy.Client client = null;
319  private CommandLine cmd = null;
320  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
321  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
322  return milliseconds / 1000;
323  };
324 
326 
327  public static void main(String[] args) {
328  SQLImporter sq = new SQLImporter();
329  sq.doWork(args);
330  }
331 
332  void doWork(String[] args) {
333  // create Options object
334 
335  SQLImporter_args s_args = new SQLImporter_args();
336 
337  try {
338  cmd = s_args.parse(args);
339  } catch (ParseException ex) {
340  LOGGER.error(ex.getLocalizedMessage());
341  s_args.printHelpMessage();
342  exit(0);
343  }
344  executeQuery();
345  }
346 
 /**
  * Runs the source-database query and streams its result set into the HEAVYAI
  * target table in bufferSize-row batches via load_table_binary_columnar.
  *
  * NOTE(review): this listing is missing a source line between the inner
  * "try {" and conn.setAutoCommit(false) (original line 368 was elided by the
  * doc renderer), so the braces below do not balance as shown.
  */
 void executeQuery() {
 Connection conn = null;
 Statement stmt = null;

 long totalTime = 0;

 try {
 // Open a connection to the source database
 LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
 conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
 cmd.getOptionValue("sourceUser"),
 cmd.getOptionValue("sourcePasswd"));
 // choose vendor-specific helpers (GIS type lookup etc.) from the JDBC url
 vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
 long startTime = System.currentTimeMillis();

 // run init file script on target DB if present
 if (cmd.hasOption("initializeFile")) {
 run_init(conn);
 }

 try {
 conn.setAutoCommit(false);
 }
 } catch (SQLException se) {
 // best-effort: some JDBC drivers do not support disabling autocommit
 LOGGER.warn(
 "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
 + se.toString());
 }

 // Execute a query
 stmt = conn.createStatement();

 int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
 // set the jdbc fetch buffer size to reduce the amount of records being moved to
 // java from postgres
 stmt.setFetchSize(bufferSize);
 long timer;

 ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));

 // check if table already exists and is compatible in HEAVYAI with the query
 // metadata
 ResultSetMetaData md = rs.getMetaData();
 checkDBTable(conn, md);

 timer = System.currentTimeMillis();

 long resultCount = 0;
 int bufferCount = 0;
 long total = 0;

 // one columnar transfer buffer per result column
 // NOTE(review): raw ArrayList (pre-generics style) — left untouched here
 List<TColumn> cols = new ArrayList(md.getColumnCount());
 for (int i = 1; i <= md.getColumnCount(); i++) {
 TColumn col = setupBinaryColumn(i, md, bufferSize);
 cols.add(col);
 }

 // read data from old DB
 while (rs.next()) {
 for (int i = 1; i <= md.getColumnCount(); i++) {
 setColValue(rs,
 cols.get(i - 1),
 md.getColumnType(i),
 i,
 md.getScale(i),
 md.getColumnTypeName(i));
 }
 resultCount++;
 bufferCount++;
 if (bufferCount == bufferSize) {
 bufferCount = 0;
 // send the buffer to HEAVY.AI
 client.load_table_binary_columnar(
 session, cmd.getOptionValue("targetTable"), cols, null);
 // recreate columnar store for use
 for (int i = 1; i <= md.getColumnCount(); i++) {
 resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
 }

 if (resultCount % 100000 == 0) {
 LOGGER.info("Imported " + resultCount + " records");
 }
 }
 }
 if (bufferCount > 0) {
 // send the LAST (partial) buffer to HEAVY.AI
 client.load_table_binary_columnar(
 session, cmd.getOptionValue("targetTable"), cols, null);
 bufferCount = 0;
 }

 LOGGER.info("result set count is " + resultCount + " read time is "
 + (System.currentTimeMillis() - timer) + "ms");

 // Clean-up environment
 rs.close();
 stmt.close();
 conn.close();

 // NOTE(review): totalTime is computed but never read afterwards
 totalTime = System.currentTimeMillis() - startTime;
 } catch (SQLException se) {
 LOGGER.error("SQLException - " + se.toString());
 se.printStackTrace();
 } catch (TDBException ex) {
 LOGGER.error("TDBException - " + ex.getError_msg());
 ex.printStackTrace();
 } catch (TException ex) {
 LOGGER.error("TException failed - " + ex.toString());
 ex.printStackTrace();
 } finally {
 // finally block used to close resources
 try {
 if (stmt != null) {
 stmt.close();
 }
 } catch (SQLException se2) {
 } // nothing we can do
 try {
 if (conn != null) {
 conn.close();
 }
 } catch (SQLException se) {
 LOGGER.error("SQlException in close - " + se.toString());
 se.printStackTrace();
 }
 try {
 if (session != null) {
 client.disconnect(session);
 }
 } catch (TDBException ex) {
 LOGGER.error("TDBException - in finalization " + ex.getError_msg());
 ex.printStackTrace();
 } catch (TException ex) {
 LOGGER.error("TException - in finalization" + ex.toString());
 ex.printStackTrace();
 }
 }
 }
486 
487  private void run_init(Connection conn) {
488  // attempt to open file
489  String line = "";
490  try {
491  BufferedReader reader =
492  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
493  Statement stmt = conn.createStatement();
494  while ((line = reader.readLine()) != null) {
495  if (line.isEmpty()) {
496  continue;
497  }
498  LOGGER.info("Running : " + line);
499  stmt.execute(line);
500  }
501  stmt.close();
502  reader.close();
503  } catch (IOException e) {
504  LOGGER.error("Exception occurred trying to read initialize file: "
505  + cmd.getOptionValue("initFile"));
506  exit(1);
507  } catch (SQLException e) {
508  LOGGER.error(
509  "Exception occurred trying to execute initialize file entry : " + line);
510  exit(1);
511  }
512  }
513 
514  private void help(Options options) {
515  // automatically generate the help statement
516  HelpFormatter formatter = new HelpFormatter();
517  formatter.setOptionComparator(null); // get options in the order they are created
518  formatter.printHelp("SQLImporter", options);
519  }
520 
 /**
  * Ensures the HEAVYAI target table exists and matches the source query:
  * creates it when absent, drops/recreates it when --truncate was given, and
  * otherwise verifies the column signatures match.
  *
  * NOTE(review): one source line (original line 523) was elided by the doc
  * renderer here — presumably the call that establishes the HEAVYAI
  * connection/session before the table checks; confirm against the repository.
  *
  * @param otherdb_conn source-database connection (for GIS type lookups)
  * @param md metadata of the source query's result set
  */
 private void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
 throws SQLException {
 String tName = cmd.getOptionValue("targetTable");

 if (tableExists(tName)) {
 // check if we want to truncate
 if (cmd.hasOption("truncate")) {
 executeDBCommand("Drop table " + tName);
 createDBTable(otherdb_conn, md);
 } else {
 List<TColumnType> columnInfo = getColumnInfo(tName);
 verifyColumnSignaturesMatch(otherdb_conn, columnInfo, md);
 }
 } else {
 createDBTable(otherdb_conn, md);
 }
 }
539 
 /**
  * Verifies that an existing HEAVYAI table is column-for-column compatible
  * with the source query: same column count, same names (case-insensitive, in
  * order) and compatible types. Any mismatch logs an error and exits.
  *
  * The integer and float cases deliberately fall through so a smaller source
  * type may match any wider destination type — the case order is significant.
  *
  * @param otherdb_conn source connection, used for GIS type lookups
  * @param dstColumns HEAVYAI column descriptors of the existing table
  * @param srcColumns metadata of the source query's result set
  */
 private void verifyColumnSignaturesMatch(Connection otherdb_conn,
 List<TColumnType> dstColumns,
 ResultSetMetaData srcColumns) throws SQLException {
 if (srcColumns.getColumnCount() != dstColumns.size()) {
 LOGGER.error("Table sizes do not match: Destination " + dstColumns.size()
 + " versus Source " + srcColumns.getColumnCount());
 exit(1);
 }
 for (int i = 1; i <= dstColumns.size(); ++i) {
 // Column names must match pairwise, in the same ordinal position.
 if (!dstColumns.get(i - 1).getCol_name().equalsIgnoreCase(
 srcColumns.getColumnName(i))) {
 LOGGER.error(
 "Destination table does not have matching column in same order for column number "
 + i + " destination column name is " + dstColumns.get(i - 1).col_name
 + " versus target column " + srcColumns.getColumnName(i));
 exit(1);
 }
 TDatumType dstType = dstColumns.get(i - 1).getCol_type().getType();
 int dstPrecision = dstColumns.get(i - 1).getCol_type().getPrecision();
 int dstScale = dstColumns.get(i - 1).getCol_type().getScale();
 int srcType = srcColumns.getColumnType(i);
 int srcPrecision = srcColumns.getPrecision(i);
 int srcScale = srcColumns.getScale(i);

 boolean match = false;
 switch (srcType) {
 case java.sql.Types.TINYINT:
 match |= dstType == TDatumType.TINYINT;
 // NOTE: it's okay to import smaller type to a bigger one,
 // so we just fall through and try to match the next type.
 // But the order of case statements is important here!
 case java.sql.Types.SMALLINT:
 match |= dstType == TDatumType.SMALLINT;
 case java.sql.Types.INTEGER:
 match |= dstType == TDatumType.INT;
 case java.sql.Types.BIGINT:
 match |= dstType == TDatumType.BIGINT;
 // Opt-in narrowing: permit a smaller destination integer type.
 if (cmd.hasOption("AllowIntegerNarrowing")) {
 match |= dstType == TDatumType.TINYINT || dstType == TDatumType.SMALLINT
 || dstType == TDatumType.INT;
 }
 break;
 case java.sql.Types.DECIMAL:
 case java.sql.Types.NUMERIC:
 // Fixed-point columns must agree on precision AND scale exactly.
 match = dstType == TDatumType.DECIMAL && dstPrecision == srcPrecision
 && dstScale == srcScale;
 break;
 case java.sql.Types.FLOAT:
 case java.sql.Types.REAL:
 match |= dstType == TDatumType.FLOAT;
 // Fall through and try double
 case java.sql.Types.DOUBLE:
 match |= dstType == TDatumType.DOUBLE;
 // Opt-in precision loss: permit a FLOAT destination for DOUBLE source.
 if (cmd.hasOption("AllowDoubleToFloat")) {
 match |= dstType == TDatumType.FLOAT;
 }
 break;
 case java.sql.Types.TIME:
 match = dstType == TDatumType.TIME;
 break;
 case java.sql.Types.TIMESTAMP:
 match = dstType == TDatumType.TIMESTAMP;
 break;
 case java.sql.Types.DATE:
 match = dstType == TDatumType.DATE;
 break;
 case java.sql.Types.BOOLEAN:
 case java.sql.Types
 .BIT: // deal with postgres treating boolean as bit... this will bite me
 match = dstType == TDatumType.BOOL;
 break;
 case java.sql.Types.NVARCHAR:
 case java.sql.Types.VARCHAR:
 case java.sql.Types.NCHAR:
 case java.sql.Types.CHAR:
 case java.sql.Types.LONGVARCHAR:
 case java.sql.Types.LONGNVARCHAR:
 // Text sources may target a string column or any geo column type.
 match = (dstType == TDatumType.STR || dstType == TDatumType.POINT
 || dstType == TDatumType.POLYGON || dstType == TDatumType.MULTIPOLYGON
 || dstType == TDatumType.LINESTRING
 || dstType == TDatumType.MULTILINESTRING
 || dstType == TDatumType.MULTIPOINT);
 break;
 case java.sql.Types.OTHER:
 // NOTE: I ignore subtypes (geography vs geometry vs none) here just because
 // it makes no difference for OmniSciDB at the moment
 Db_vendor_types.GisType gisType =
 vendor_types.find_gis_type(otherdb_conn, srcColumns, i);
 // The SRID is carried in the destination column's scale slot.
 if (gisType.srid != dstScale) {
 match = false;
 break;
 }
 switch (dstType) {
 case POINT:
 match = gisType.type.equalsIgnoreCase("POINT");
 break;
 case MULTIPOINT:
 match = gisType.type.equalsIgnoreCase("MULTIPOINT");
 break;
 case LINESTRING:
 match = gisType.type.equalsIgnoreCase("LINESTRING");
 break;
 case MULTILINESTRING:
 match = gisType.type.equalsIgnoreCase("MULTILINESTRING");
 break;
 case POLYGON:
 match = gisType.type.equalsIgnoreCase("POLYGON");
 break;
 case MULTIPOLYGON:
 match = gisType.type.equalsIgnoreCase("MULTIPOLYGON");
 break;
 default:
 LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
 + " not Supported");
 exit(1);
 }
 break;
 default:
 LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
 + " not Supported");
 exit(1);
 }
 if (!match) {
 LOGGER.error("Source and destination types for column "
 + srcColumns.getColumnName(i)
 + " do not match. Please make sure that type, precision and scale are exactly the same");
 exit(1);
 }
 }
 }
670 
671  private void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData) {
672  StringBuilder sb = new StringBuilder();
673  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
674 
675  // Now iterate the metadata
676  try {
677  for (int i = 1; i <= metaData.getColumnCount(); i++) {
678  if (i > 1) {
679  sb.append(",");
680  }
681  LOGGER.debug("Column name is " + metaData.getColumnName(i));
682  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
683  LOGGER.debug("Column type is " + metaData.getColumnType(i));
684 
685  sb.append(metaData.getColumnName(i)).append(" ");
686  int col_type = metaData.getColumnType(i);
687  if (col_type == java.sql.Types.OTHER) {
688  Db_vendor_types.GisType type =
689  vendor_types.find_gis_type(otherdb_conn, metaData, i);
690  sb.append(Db_vendor_types.gis_type_to_str(type));
691  } else {
692  sb.append(getColType(metaData.getColumnType(i),
693  metaData.getPrecision(i),
694  metaData.getScale(i)));
695  }
696  }
697  sb.append(")");
698 
699  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
700  sb.append(" with (fragment_size = ");
701  sb.append(cmd.getOptionValue("fragmentSize", "0"));
702  sb.append(")");
703  }
704 
705  } catch (SQLException ex) {
706  LOGGER.error("Error processing the metadata - " + ex.toString());
707  exit(1);
708  }
709 
710  executeDBCommand(sb.toString());
711  }
712 
713  private void createDBConnection() {
714  TTransport transport = null;
715  TProtocol protocol = new TBinaryProtocol(transport);
716  int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
717  String server = cmd.getOptionValue("server", "localhost");
718  try {
719  // Uses default certificate stores.
720  boolean load_trust_store = cmd.hasOption("https");
721  SockTransportProperties skT = null;
722  if (cmd.hasOption("https")) {
723  skT = SockTransportProperties.getEncryptedClientDefaultTrustStore(
724  !cmd.hasOption("insecure"));
725  transport = skT.openHttpsClientTransport(server, port);
726  transport.open();
727  protocol = new TJSONProtocol(transport);
728  } else if (cmd.hasOption("http")) {
729  skT = SockTransportProperties.getUnencryptedClient();
730  transport = skT.openHttpClientTransport(server, port);
731  protocol = new TJSONProtocol(transport);
732  } else {
733  skT = SockTransportProperties.getUnencryptedClient();
734  transport = skT.openClientTransport(server, port);
735  transport.open();
736  protocol = new TBinaryProtocol(transport);
737  }
738 
739  client = new Heavy.Client(protocol);
740  // This if will be useless until PKI signon
741  if (cmd.hasOption("user")) {
742  session = client.connect(cmd.getOptionValue("user", "admin"),
743  cmd.getOptionValue("passwd", "HyperInteractive"),
744  cmd.getOptionValue("database", "omnisci"));
745  }
746  LOGGER.debug("Connected session is " + session);
747 
748  } catch (TTransportException ex) {
749  LOGGER.error("Connection failed - " + ex.toString());
750  exit(1);
751  } catch (TDBException ex) {
752  LOGGER.error("Connection failed - " + ex.getError_msg());
753  exit(2);
754  } catch (TException ex) {
755  LOGGER.error("Connection failed - " + ex.toString());
756  exit(3);
757  } catch (Exception ex) {
758  LOGGER.error("General exception - " + ex.toString());
759  exit(4);
760  }
761  }
762 
763  private List<TColumnType> getColumnInfo(String tName) {
764  LOGGER.debug("Getting columns for " + tName);
765  List<TColumnType> row_descriptor = null;
766  try {
767  TTableDetails table_details = client.get_table_details(session, tName);
768  row_descriptor = table_details.row_desc;
769  } catch (TDBException ex) {
770  LOGGER.error("column check failed - " + ex.getError_msg());
771  exit(3);
772  } catch (TException ex) {
773  LOGGER.error("column check failed - " + ex.toString());
774  exit(3);
775  }
776  return row_descriptor;
777  }
778 
779  private boolean tableExists(String tName) {
780  LOGGER.debug("Check for table " + tName);
781  try {
782  List<String> recv_get_tables = client.get_tables(session);
783  for (String s : recv_get_tables) {
784  if (s.equals(tName)) {
785  return true;
786  }
787  }
788  } catch (TDBException ex) {
789  LOGGER.error("Table check failed - " + ex.getError_msg());
790  exit(3);
791  } catch (TException ex) {
792  LOGGER.error("Table check failed - " + ex.toString());
793  exit(3);
794  }
795  return false;
796  }
797 
798  private void executeDBCommand(String sql) {
799  LOGGER.info("Run Command - " + sql);
800 
801  try {
802  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
803  } catch (TDBException ex) {
804  LOGGER.error("SQL Execute failed - " + ex.getError_msg());
805  exit(1);
806  } catch (TException ex) {
807  LOGGER.error("SQL Execute failed - " + ex.toString());
808  exit(1);
809  }
810  }
811 
812  private String getColType(int cType, int precision, int scale) {
813  // Note - if cType is OTHER a earlier call will have been made
814  // to try and work out the db vendors specific type.
815  if (precision > 19) {
816  precision = 19;
817  }
818  if (scale > 19) {
819  scale = 18;
820  }
821  switch (cType) {
822  case java.sql.Types.TINYINT:
823  return ("TINYINT");
824  case java.sql.Types.SMALLINT:
825  return ("SMALLINT");
826  case java.sql.Types.INTEGER:
827  return ("INTEGER");
828  case java.sql.Types.BIGINT:
829  return ("BIGINT");
830  case java.sql.Types.FLOAT:
831  return ("FLOAT");
832  case java.sql.Types.DECIMAL:
833  return ("DECIMAL(" + precision + "," + scale + ")");
834  case java.sql.Types.DOUBLE:
835  return ("DOUBLE");
836  case java.sql.Types.REAL:
837  return ("REAL");
838  case java.sql.Types.NUMERIC:
839  return ("NUMERIC(" + precision + "," + scale + ")");
840  case java.sql.Types.TIME:
841  return ("TIME");
842  case java.sql.Types.TIMESTAMP:
843  return ("TIMESTAMP");
844  case java.sql.Types.DATE:
845  return ("DATE");
846  case java.sql.Types.BOOLEAN:
847  case java.sql.Types
848  .BIT: // deal with postgress treating boolean as bit... this will bite me
849  return ("BOOLEAN");
850  case java.sql.Types.NVARCHAR:
851  case java.sql.Types.VARCHAR:
852  case java.sql.Types.NCHAR:
853  case java.sql.Types.CHAR:
854  case java.sql.Types.LONGVARCHAR:
855  case java.sql.Types.LONGNVARCHAR:
856  return ("TEXT ENCODING DICT");
857  default:
858  throw new AssertionError("Column type " + cType + " not Supported");
859  }
860  }
861 
862  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
863  throws SQLException {
864  TColumn col = new TColumn();
865 
866  col.nulls = new ArrayList<Boolean>(bufferSize);
867 
868  col.data = new TColumnData();
869 
870  switch (md.getColumnType(i)) {
871  case java.sql.Types.TINYINT:
872  case java.sql.Types.SMALLINT:
873  case java.sql.Types.INTEGER:
874  case java.sql.Types.BIGINT:
875  case java.sql.Types.TIME:
876  case java.sql.Types.TIMESTAMP:
877  case java.sql.Types
878  .BIT: // deal with postgress treating boolean as bit... this will bite me
879  case java.sql.Types.BOOLEAN:
880  case java.sql.Types.DATE:
881  case java.sql.Types.DECIMAL:
882  case java.sql.Types.NUMERIC:
883  col.data.int_col = new ArrayList<Long>(bufferSize);
884  break;
885 
886  case java.sql.Types.FLOAT:
887  case java.sql.Types.DOUBLE:
888  case java.sql.Types.REAL:
889  col.data.real_col = new ArrayList<Double>(bufferSize);
890  break;
891 
892  case java.sql.Types.NVARCHAR:
893  case java.sql.Types.VARCHAR:
894  case java.sql.Types.NCHAR:
895  case java.sql.Types.CHAR:
896  case java.sql.Types.LONGVARCHAR:
897  case java.sql.Types.LONGNVARCHAR:
898  case java.sql.Types.OTHER:
899  col.data.str_col = new ArrayList<String>(bufferSize);
900  break;
901 
902  default:
903  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
904  }
905  return col;
906  }
907 
908  private void setColValue(ResultSet rs,
909  TColumn col,
910  int columnType,
911  int colNum,
912  int scale,
913  String colTypeName) throws SQLException {
914  switch (columnType) {
915  case java.sql.Types
916  .BIT: // deal with postgress treating boolean as bit... this will bite me
917  case java.sql.Types.BOOLEAN:
918  Boolean b = rs.getBoolean(colNum);
919  if (rs.wasNull()) {
920  col.nulls.add(Boolean.TRUE);
921  col.data.int_col.add(0L);
922  } else {
923  col.nulls.add(Boolean.FALSE);
924  col.data.int_col.add(b ? 1L : 0L);
925  }
926  break;
927 
928  case java.sql.Types.DECIMAL:
929  case java.sql.Types.NUMERIC:
930  BigDecimal bd = rs.getBigDecimal(colNum);
931  if (rs.wasNull()) {
932  col.nulls.add(Boolean.TRUE);
933  col.data.int_col.add(0L);
934  } else {
935  col.nulls.add(Boolean.FALSE);
936  col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
937  }
938  break;
939 
940  case java.sql.Types.TINYINT:
941  case java.sql.Types.SMALLINT:
942  case java.sql.Types.INTEGER:
943  case java.sql.Types.BIGINT:
944  Long l = rs.getLong(colNum);
945  if (rs.wasNull()) {
946  col.nulls.add(Boolean.TRUE);
947  col.data.int_col.add(new Long(0));
948  } else {
949  col.nulls.add(Boolean.FALSE);
950  col.data.int_col.add(l);
951  }
952  break;
953 
954  case java.sql.Types.TIME:
955  Time t = rs.getTime(colNum);
956  if (rs.wasNull()) {
957  col.nulls.add(Boolean.TRUE);
958  col.data.int_col.add(0L);
959 
960  } else {
961  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
962  col.nulls.add(Boolean.FALSE);
963  }
964 
965  break;
966  case java.sql.Types.TIMESTAMP:
967  Timestamp ts = rs.getTimestamp(colNum);
968  if (rs.wasNull()) {
969  col.nulls.add(Boolean.TRUE);
970  col.data.int_col.add(0L);
971 
972  } else {
973  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
974  col.nulls.add(Boolean.FALSE);
975  }
976 
977  break;
978  case java.sql.Types.DATE:
979  Date d = rs.getDate(colNum);
980  if (rs.wasNull()) {
981  col.nulls.add(Boolean.TRUE);
982  col.data.int_col.add(0L);
983 
984  } else {
985  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
986  col.nulls.add(Boolean.FALSE);
987  }
988  break;
989  case java.sql.Types.FLOAT:
990  case java.sql.Types.DOUBLE:
991  case java.sql.Types.REAL:
992  Double db = rs.getDouble(colNum);
993  if (rs.wasNull()) {
994  col.nulls.add(Boolean.TRUE);
995  col.data.real_col.add(new Double(0));
996 
997  } else {
998  col.nulls.add(Boolean.FALSE);
999  col.data.real_col.add(db);
1000  }
1001  break;
1002 
1003  case java.sql.Types.NVARCHAR:
1004  case java.sql.Types.VARCHAR:
1005  case java.sql.Types.NCHAR:
1006  case java.sql.Types.CHAR:
1007  case java.sql.Types.LONGVARCHAR:
1008  case java.sql.Types.LONGNVARCHAR:
1009  String strVal = rs.getString(colNum);
1010  if (rs.wasNull()) {
1011  col.nulls.add(Boolean.TRUE);
1012  col.data.str_col.add("");
1013 
1014  } else {
1015  col.data.str_col.add(strVal);
1016  col.nulls.add(Boolean.FALSE);
1017  }
1018  break;
1019  case java.sql.Types.OTHER:
1020  Object objVal = rs.getObject(colNum);
1021  if (rs.wasNull()) {
1022  col.nulls.add(Boolean.TRUE);
1023  col.data.str_col.add("");
1024  } else {
1025  col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
1026  col.nulls.add(Boolean.FALSE);
1027  }
1028  break;
1029  default:
1030  throw new AssertionError("Column type " + columnType + " not Supported");
1031  }
1032  }
1033 
1034  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
1035  throws SQLException {
1036  col.nulls.clear();
1037 
1038  switch (md.getColumnType(i)) {
1039  case java.sql.Types.TINYINT:
1040  case java.sql.Types.SMALLINT:
1041  case java.sql.Types.INTEGER:
1042  case java.sql.Types.BIGINT:
1043  case java.sql.Types.TIME:
1044  case java.sql.Types.TIMESTAMP:
1045  case java.sql.Types
1046  .BIT: // deal with postgress treating boolean as bit... this will bite me
1047  case java.sql.Types.BOOLEAN:
1048  case java.sql.Types.DATE:
1049  case java.sql.Types.DECIMAL:
1050  case java.sql.Types.NUMERIC:
1051  col.data.int_col.clear();
1052  break;
1053 
1054  case java.sql.Types.FLOAT:
1055  case java.sql.Types.DOUBLE:
1056  case java.sql.Types.REAL:
1057  col.data.real_col.clear();
1058  break;
1059 
1060  case java.sql.Types.NVARCHAR:
1061  case java.sql.Types.VARCHAR:
1062  case java.sql.Types.NCHAR:
1063  case java.sql.Types.CHAR:
1064  case java.sql.Types.LONGVARCHAR:
1065  case java.sql.Types.LONGNVARCHAR:
1066  case java.sql.Types.OTHER:
1067  col.data.str_col.clear();
1068  break;
1069  default:
1070  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
1071  }
1072  }
1073 }
void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData)
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
size_t append(FILE *f, const size_t size, const int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:158
String getColType(int cType, int precision, int scale)
void executeDBCommand(String sql)
void help(Options options)
static void main(String[] args)
void doWork(String[] args)
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
void verifyColumnSignaturesMatch(Connection otherdb_conn, List< TColumnType > dstColumns, ResultSetMetaData srcColumns)
tuple line
Definition: parse_ast.py:10
static final Logger LOGGER
tuple conn
Definition: report.py:41
boolean tableExists(String tName)
long getSecondsFromMilliseconds(long milliseconds)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)