OmniSciDB  cde582ebc3
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2022 HEAVY.AI, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
23 
24 import org.apache.commons.cli.*;
25 import org.apache.thrift.TException;
26 import org.apache.thrift.protocol.TBinaryProtocol;
27 import org.apache.thrift.protocol.TJSONProtocol;
28 import org.apache.thrift.protocol.TProtocol;
29 import org.apache.thrift.transport.TSocket;
30 import org.apache.thrift.transport.TTransport;
31 import org.apache.thrift.transport.TTransportException;
32 import org.slf4j.Logger;
33 import org.slf4j.LoggerFactory;
34 
35 import java.io.BufferedReader;
36 import java.io.FileReader;
37 import java.io.IOException;
38 import java.math.BigDecimal;
39 import java.security.KeyStore;
40 import java.sql.*;
41 import java.time.*;
42 import java.util.ArrayList;
43 import java.util.List;
44 
45 import ai.heavy.thrift.server.*;
46 
/**
 * Converts an epoch timestamp in milliseconds to whole seconds (truncating).
 * Implemented with a lambda in SQLImporter, so it is marked as a functional
 * interface to let the compiler enforce the single-abstract-method contract.
 */
@FunctionalInterface
interface DateTimeUtils {
  long getSecondsFromMilliseconds(long milliseconds);
}
50 
51 class MutuallyExlusiveOptionsException extends ParseException {
52  protected MutuallyExlusiveOptionsException(String message) {
53  super(message);
54  }
55 
56  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
57  StringBuffer sb = new StringBuffer(
58  "Mutually exclusive options used. " + errMsg + ". Options provided [");
59  for (String s : strings) {
60  sb.append(s);
61  sb.append(" ");
62  }
63  sb.setCharAt(sb.length() - 1, ']');
64  return new MutuallyExlusiveOptionsException(sb.toString());
65  }
66 }
67 
  // Registry of all command-line options; populated in the constructor and
  // consumed by printHelpMessage() and parse().
  private Options options = new Options();
70 
  // Prints the tool version banner. NOTE(review): the version string is
  // hard-coded and may drift from the actual release version.
  void printVersion() {
    System.out.println("SQLImporter Version 4.6.0");
  }
74 
  // Body of printHelpMessage(): builds the usage banner, then delegates the
  // per-option descriptions to commons-cli's HelpFormatter.
  // NOTE(review): the enclosing method declaration is not visible in this
  // rendering of the file; the statements below form its body.
  StringBuffer sb = new StringBuffer("\nSQLImporter ");
  // Ready for PKI auth
  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
  // sb.append("([--ca-trust-store <ca trust store file name>]
  // --ca-trust-store-password
  // <trust store password> | --insecure)\n");
  sb.append(
          "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
  sb.append(
          "-su <other database user> -sp <other database user password> -ss <other database sql statement>\n");
  sb.append(
          "-t <HEAVYAI target table> -b <transfer buffer size> -f <table fragment size>\n");
  sb.append("[-tr] [-adtf] [-nprg] -i <init commands file>\n");
  sb.append("\nSQLImporter -h | --help\n\n");

  HelpFormatter formatter = new HelpFormatter();
  // Forces help to print out options in order they were added rather
  // than in alphabetical order
  formatter.setOptionComparator(null);
  int help_width = 100;
  formatter.printHelp(help_width, sb.toString(), "", options, "");
  }
100 
  // Registers every supported command-line option on `options`.
  // NOTE(review): the enclosing constructor declaration (SQLImporter_args())
  // is not visible in this rendering; the statements below form its body.
  options.addOption("r", true, "Row Load Limit");

  // HEAVYAI authentication options
  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
  options.addOption(
          Option.builder("u").hasArg().desc("HEAVYAI User").longOpt("user").build());
  options.addOption(Option.builder("p")
                            .hasArg()
                            .desc("HEAVYAI Password")
                            .longOpt("passwd")
                            .build());
  // HEAVYAI transport options — an OptionGroup makes binary/http/https
  // mutually exclusive at parse time.
  OptionGroup transport_grp = new OptionGroup();
  transport_grp.addOption(Option.builder()
                                  .desc("use binary transport to connect to HEAVYAI ")
                                  .longOpt("binary")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use http transport to connect to HEAVYAI ")
                                  .longOpt("http")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use https transport to connect to HEAVYAI ")
                                  .longOpt("https")
                                  .build());
  options.addOptionGroup(transport_grp);

  // HEAVYAI database server details
  options.addOption(Option.builder("s")
                            .hasArg()
                            .desc("HEAVYAI Server")
                            .longOpt("server")
                            .build());
  options.addOption(Option.builder("db")
                            .hasArg()
                            .desc("HEAVYAI Database")
                            .longOpt("database")
                            .build());
  options.addOption(
          Option.builder().hasArg().desc("HEAVYAI Port").longOpt("port").build());

  // HEAVYAI server authentication options
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store")
                            .longOpt("ca-trust-store")
                            .build());
  // NOTE(review): registered as "ca-trust-store-passwd", but the (currently
  // dead) PKI branch in parse() looks up "ca-trust-store-password" — these
  // must be reconciled before that branch is enabled.
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store password")
                            .longOpt("ca-trust-store-passwd")
                            .build());
  options.addOption(
          Option.builder()
                  .desc("Insecure TLS - do not validate server HEAVYAI server credentials")
                  .longOpt("insecure")
                  .build());

  // Other database connection details
  options.addOption(Option.builder("d")
                            .hasArg()
                            .desc("JDBC driver class")
                            .longOpt("driver")
                            .build());
  options.addOption(Option.builder("c")
                            .hasArg()
                            .desc("JDBC Connection string")
                            .longOpt("jdbcConnect")
                            .required()
                            .build());
  options.addOption(Option.builder("su")
                            .hasArg()
                            .desc("Source User")
                            .longOpt("sourceUser")
                            .required()
                            .build());
  options.addOption(Option.builder("sp")
                            .hasArg()
                            .desc("Source Password")
                            .longOpt("sourcePasswd")
                            .required()
                            .build());
  options.addOption(Option.builder("ss")
                            .hasArg()
                            .desc("SQL Select statement")
                            .longOpt("sqlStmt")
                            .required()
                            .build());

  options.addOption(Option.builder("t")
                            .hasArg()
                            .desc("HEAVYAI Target Table")
                            .longOpt("targetTable")
                            .required()
                            .build());

  options.addOption(Option.builder("b")
                            .hasArg()
                            .desc("transfer buffer size")
                            .longOpt("bufferSize")
                            .build());
  options.addOption(Option.builder("f")
                            .hasArg()
                            .desc("table fragment size")
                            .longOpt("fragmentSize")
                            .build());

  options.addOption(Option.builder("tr")
                            .desc("Truncate table if it exists")
                            .longOpt("truncate")
                            .build());

  options.addOption(Option.builder("i")
                            .hasArg()
                            .desc("File containing init command for DB")
                            .longOpt("initializeFile")
                            .build());

  options.addOption(
          Option.builder("adtf")
                  .desc("Allow double to float conversion, note precision will be reduced")
                  .longOpt("AllowDoubleToFloat")
                  .build());

  options.addOption(
          Option.builder("ain")
                  .desc("Allow conversion from bigger integer types to smaller. Overflow might occur, "
                          + "use it only when casting is impossible")
                  .longOpt("AllowIntegerNarrowing")
                  .build());

  options.addOption(
          Option.builder("nprg")
                  .desc("Do not assign Render Groups to Polygons (faster import, but not renderable)")
                  .longOpt("noPolyRenderGroups")
                  .build());
  }
239 
  // Marks an already-registered option as required and returns it, so the
  // caller can re-add it and re-parse with the stricter rule (used by parse()
  // to enforce user+password pairing after a first lenient pass).
  private Option setOptionRequired(Option option) {
    option.setRequired(true);
    return option;
  }
244 
  /**
   * Parses the command line in two passes: a lenient pass that only looks for
   * --help/--version (so required options do not block the help output), then
   * a strict pass against the full option set with cross-option validation.
   *
   * @throws ParseException on missing/invalid/mutually-exclusive options
   */
  public CommandLine parse(String[] args) throws ParseException {
    CommandLineParser clp = new DefaultParser() {
      public CommandLine parse(Options options, String[] strings) throws ParseException {
        Options helpOptions = new Options();
        helpOptions.addOption(
                Option.builder("h").desc("help message").longOpt("help").build());
        try {
          // NOTE(review): as rendered, `cmd` is declared inside this try but
          // used below; lines appear to be missing from this rendering of the
          // file (a declaration outside the try is presumably elided).
          CommandLine cmd = super.parse(helpOptions, strings);
        } catch (UnrecognizedOptionException uE) {
          // First pass deliberately ignores everything except -h/--help.
        }
        if (cmd.hasOption("help")) {
          // NOTE(review): a line is missing here in this rendering —
          // presumably printHelpMessage() before exiting.
          exit(0);
        }
        if (cmd.hasOption("version")) {
          printVersion();
          exit(0);
        }
        // Strict pass against the full option set.
        cmd = super.parse(options, strings);
        if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
          throw new MissingArgumentException(
                  "Must supply either an HEAVYAI db user or a user certificate");
        }
        // if user supplied must have password and visa versa
        if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
          options.addOption(setOptionRequired(options.getOption("user")));
          options.addOption(setOptionRequired(options.getOption("passwd")));
          super.parse(options, strings);
        }

        // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
        // and
        // visa versa
        // Dead code kept for planned PKI auth; never executes.
        if (false) {
          if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
            // NOTE(review): looks up "ca-trust-store-password" but the option
            // is registered as "ca-trust-store-passwd" — fix before enabling.
            options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
            options.addOption(
                    setOptionRequired(options.getOption("ca-trust-store-password")));
            super.parse(options, strings);
          }
          if (options.getOption("user").isRequired()
                  && options.getOption("client-key").isRequired()) {
            // NOTE(review): a declaration line is missing in this rendering
            // (presumably `MutuallyExlusiveOptionsException meo =`).
            MutuallyExlusiveOptionsException.create(
                    "user/password can not be use with client-cert/client-key",
                    strings);
            throw meo;
          }

          if (cmd.hasOption("http")
                  || cmd.hasOption("binary")
                          && (cmd.hasOption("client-cert")
                                  || cmd.hasOption("client-key"))) {
            // NOTE(review): `meo` is created but never thrown here — looks
            // like a bug (the condition's &&/|| precedence is also suspect).
            MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                    "http|binary can not be use with ca-cert|client-cert|client-key",
                    strings);
          }
        }

        // --insecure only makes sense for TLS connections.
        if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
          MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                  "insecure can only be use with https", strings);
          throw meo;
        }

        return cmd;
      }

      // Three-arg overload required by CommandLineParser in some versions;
      // intentionally unused.
      public CommandLine parse(Options options, String[] strings, boolean b)
              throws ParseException {
        return null;
      }
    };
    return clp.parse(options, args);
  }
320 }
321 
322 public class SQLImporter {
  // Session handle returned by client.connect(); null until a connection is made.
  protected String session = null;
  // Thrift client for the HEAVYAI server; created in createDBConnection().
  protected Heavy.Client client = null;
  // Parsed command line; populated in doWork() before any other method runs.
  private CommandLine cmd = null;
  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
  // Converts epoch milliseconds to whole seconds (truncating division).
  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
    return milliseconds / 1000;
  };
330 
332 
333  public static void main(String[] args) {
334  SQLImporter sq = new SQLImporter();
335  sq.doWork(args);
336  }
337 
338  void doWork(String[] args) {
339  // create Options object
340 
341  SQLImporter_args s_args = new SQLImporter_args();
342 
343  try {
344  cmd = s_args.parse(args);
345  } catch (ParseException ex) {
346  LOGGER.error(ex.getLocalizedMessage());
347  s_args.printHelpMessage();
348  exit(0);
349  }
350  executeQuery();
351  }
352 
  /**
   * Main import loop: connects to the source database over JDBC, runs the
   * user's SELECT, creates/validates the HEAVYAI target table from the result
   * metadata, then streams rows to HEAVY.AI in columnar buffers of
   * {@code bufferSize} rows. Cleans up JDBC and Thrift resources on exit.
   */
  void executeQuery() {
    Connection conn = null;
    Statement stmt = null;

    long totalTime = 0;

    try {
      // Open a connection
      LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
      conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
              cmd.getOptionValue("sourceUser"),
              cmd.getOptionValue("sourcePasswd"));
      // Vendor-specific helpers (GIS type mapping etc.) chosen from the URL.
      vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
      long startTime = System.currentTimeMillis();

      // run init file script on targe DB if present
      if (cmd.hasOption("initializeFile")) {
        run_init(conn);
      }

      try {
        // NOTE(review): a guard line appears to be missing from this rendering
        // (the stray '}' below suggests an enclosing `if` around this call) —
        // confirm against the original source before editing this region.
        conn.setAutoCommit(false);
        }
      } catch (SQLException se) {
        LOGGER.warn(
                "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
                + se.toString());
      }

      // Execute a query
      stmt = conn.createStatement();

      int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
      // set the jdbc fetch buffer size to reduce the amount of records being moved to
      // java from postgress
      stmt.setFetchSize(bufferSize);
      long timer;

      ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));

      // check if table already exists and is compatible in HEAVYAI with the query
      // metadata
      ResultSetMetaData md = rs.getMetaData();
      checkDBTable(conn, md);

      timer = System.currentTimeMillis();

      long resultCount = 0;
      int bufferCount = 0;
      long total = 0;

      // NOTE(review): raw ArrayList — should be new ArrayList<>(...) to avoid
      // the unchecked warning.
      List<TColumn> cols = new ArrayList(md.getColumnCount());
      for (int i = 1; i <= md.getColumnCount(); i++) {
        TColumn col = setupBinaryColumn(i, md, bufferSize);
        cols.add(col);
      }

      boolean assignRenderGroups = !cmd.hasOption("noPolyRenderGroups");

      // read data from old DB
      while (rs.next()) {
        for (int i = 1; i <= md.getColumnCount(); i++) {
          setColValue(rs,
                  cols.get(i - 1),
                  md.getColumnType(i),
                  i,
                  md.getScale(i),
                  md.getColumnTypeName(i));
        }
        resultCount++;
        bufferCount++;
        if (bufferCount == bufferSize) {
          bufferCount = 0;
          // send the buffer to HEAVY.AI
          if (assignRenderGroups) {
            client.load_table_binary_columnar_polys(
                    session, cmd.getOptionValue("targetTable"), cols, null, true);
          } else {
            client.load_table_binary_columnar(
                    session, cmd.getOptionValue("targetTable"), cols, null);
          }
          // recreate columnar store for use
          for (int i = 1; i <= md.getColumnCount(); i++) {
            resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
          }

          // Progress log; only fires when a flush coincides with a 100k boundary.
          if (resultCount % 100000 == 0) {
            LOGGER.info("Imported " + resultCount + " records");
          }
        }
      }
      if (bufferCount > 0) {
        // send the LAST buffer to HEAVY.AI
        if (assignRenderGroups) {
          client.load_table_binary_columnar_polys(
                  session, cmd.getOptionValue("targetTable"), cols, null, true);
        } else {
          client.load_table_binary_columnar(
                  session, cmd.getOptionValue("targetTable"), cols, null);
        }
        bufferCount = 0;
      }

      // dump render group assignment data immediately
      if (assignRenderGroups) {
        client.load_table_binary_columnar_polys(
                session, cmd.getOptionValue("targetTable"), null, null, false);
      }

      LOGGER.info("result set count is " + resultCount + " read time is "
              + (System.currentTimeMillis() - timer) + "ms");

      // Clean-up environment
      rs.close();
      stmt.close();
      conn.close();

      totalTime = System.currentTimeMillis() - startTime;
    } catch (SQLException se) {
      LOGGER.error("SQLException - " + se.toString());
      se.printStackTrace();
    } catch (TDBException ex) {
      LOGGER.error("TDBException - " + ex.getError_msg());
      ex.printStackTrace();
    } catch (TException ex) {
      LOGGER.error("TException failed - " + ex.toString());
      ex.printStackTrace();
    } finally {
      // finally block used to close resources
      try {
        if (stmt != null) {
          stmt.close();
        }
      } catch (SQLException se2) {
      } // nothing we can do
      try {
        if (conn != null) {
          conn.close();
        }
      } catch (SQLException se) {
        LOGGER.error("SQlException in close - " + se.toString());
        se.printStackTrace();
      }
      try {
        if (session != null) {
          client.disconnect(session);
        }
      } catch (TDBException ex) {
        LOGGER.error("TDBException - in finalization " + ex.getError_msg());
        ex.printStackTrace();
      } catch (TException ex) {
        LOGGER.error("TException - in finalization" + ex.toString());
        ex.printStackTrace();
      }
    }
  }
510 
511  private void run_init(Connection conn) {
512  // attempt to open file
513  String line = "";
514  try {
515  BufferedReader reader =
516  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
517  Statement stmt = conn.createStatement();
518  while ((line = reader.readLine()) != null) {
519  if (line.isEmpty()) {
520  continue;
521  }
522  LOGGER.info("Running : " + line);
523  stmt.execute(line);
524  }
525  stmt.close();
526  reader.close();
527  } catch (IOException e) {
528  LOGGER.error("Exception occurred trying to read initialize file: "
529  + cmd.getOptionValue("initFile"));
530  exit(1);
531  } catch (SQLException e) {
532  LOGGER.error(
533  "Exception occurred trying to execute initialize file entry : " + line);
534  exit(1);
535  }
536  }
537 
  // Prints commons-cli generated usage for the given options.
  // NOTE(review): appears unused in this file — printHelpMessage() is the one
  // called from doWork(); confirm before removing.
  private void help(Options options) {
    // automatically generate the help statement
    HelpFormatter formatter = new HelpFormatter();
    formatter.setOptionComparator(null); // get options in the order they are created
    formatter.printHelp("SQLImporter", options);
  }
544 
  /**
   * Ensures the HEAVYAI target table exists and matches the source query's
   * column signature; creates it (or drops and recreates it under --truncate)
   * when needed.
   * NOTE(review): a line is missing from this rendering between the signature
   * and the first visible statement — presumably the createDBConnection()
   * call that establishes `client`/`session` before tableExists() runs;
   * confirm against the original source.
   */
  private void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
          throws SQLException {
    String tName = cmd.getOptionValue("targetTable");

    if (tableExists(tName)) {
      // check if we want to truncate
      if (cmd.hasOption("truncate")) {
        executeDBCommand("Drop table " + tName);
        createDBTable(otherdb_conn, md);
      } else {
        List<TColumnType> columnInfo = getColumnInfo(tName);
        verifyColumnSignaturesMatch(otherdb_conn, columnInfo, md);
      }
    } else {
      createDBTable(otherdb_conn, md);
    }
  }
563 
  /**
   * Verifies that each source (JDBC) column is compatible with the existing
   * HEAVYAI destination column at the same position, by name (case-insensitive)
   * and type. Terminates the process on any mismatch.
   *
   * The integer and float/double switch arms intentionally FALL THROUGH: a
   * smaller source type may land in any equal-or-wider destination type, so
   * the case order (TINYINT -> SMALLINT -> INT -> BIGINT, FLOAT -> DOUBLE)
   * is load-bearing. Do not add break statements casually.
   */
  private void verifyColumnSignaturesMatch(Connection otherdb_conn,
          List<TColumnType> dstColumns,
          ResultSetMetaData srcColumns) throws SQLException {
    if (srcColumns.getColumnCount() != dstColumns.size()) {
      LOGGER.error("Table sizes do not match: Destination " + dstColumns.size()
              + " versus Source " + srcColumns.getColumnCount());
      exit(1);
    }
    for (int i = 1; i <= dstColumns.size(); ++i) {
      // Columns must appear in the same order with matching names.
      if (!dstColumns.get(i - 1).getCol_name().equalsIgnoreCase(
                  srcColumns.getColumnName(i))) {
        LOGGER.error(
                "Destination table does not have matching column in same order for column number "
                + i + " destination column name is " + dstColumns.get(i - 1).col_name
                + " versus target column " + srcColumns.getColumnName(i));
        exit(1);
      }
      TDatumType dstType = dstColumns.get(i - 1).getCol_type().getType();
      int dstPrecision = dstColumns.get(i - 1).getCol_type().getPrecision();
      int dstScale = dstColumns.get(i - 1).getCol_type().getScale();
      int srcType = srcColumns.getColumnType(i);
      int srcPrecision = srcColumns.getPrecision(i);
      int srcScale = srcColumns.getScale(i);

      boolean match = false;
      switch (srcType) {
        case java.sql.Types.TINYINT:
          match |= dstType == TDatumType.TINYINT;
          // NOTE: it's okay to import smaller type to a bigger one,
          // so we just fall through and try to match the next type.
          // But the order of case statements is important here!
        case java.sql.Types.SMALLINT:
          match |= dstType == TDatumType.SMALLINT;
        case java.sql.Types.INTEGER:
          match |= dstType == TDatumType.INT;
        case java.sql.Types.BIGINT:
          match |= dstType == TDatumType.BIGINT;
          // --AllowIntegerNarrowing permits lossy narrowing conversions.
          if (cmd.hasOption("AllowIntegerNarrowing")) {
            match |= dstType == TDatumType.TINYINT || dstType == TDatumType.SMALLINT
                    || dstType == TDatumType.INT;
          }
          break;
        case java.sql.Types.DECIMAL:
        case java.sql.Types.NUMERIC:
          // Decimals must match precision and scale exactly.
          match = dstType == TDatumType.DECIMAL && dstPrecision == srcPrecision
                  && dstScale == srcScale;
          break;
        case java.sql.Types.FLOAT:
        case java.sql.Types.REAL:
          match |= dstType == TDatumType.FLOAT;
          // Fall through and try double
        case java.sql.Types.DOUBLE:
          match |= dstType == TDatumType.DOUBLE;
          // --AllowDoubleToFloat permits lossy double -> float conversion.
          if (cmd.hasOption("AllowDoubleToFloat")) {
            match |= dstType == TDatumType.FLOAT;
          }
          break;
        case java.sql.Types.TIME:
          match = dstType == TDatumType.TIME;
          break;
        case java.sql.Types.TIMESTAMP:
          match = dstType == TDatumType.TIMESTAMP;
          break;
        case java.sql.Types.DATE:
          match = dstType == TDatumType.DATE;
          break;
        case java.sql.Types.BOOLEAN:
        case java.sql.Types
                .BIT: // deal with postgres treating boolean as bit... this will bite me
          match = dstType == TDatumType.BOOL;
          break;
        case java.sql.Types.NVARCHAR:
        case java.sql.Types.VARCHAR:
        case java.sql.Types.NCHAR:
        case java.sql.Types.CHAR:
        case java.sql.Types.LONGVARCHAR:
        case java.sql.Types.LONGNVARCHAR:
          // Strings may also feed geo columns (WKT input).
          match = (dstType == TDatumType.STR || dstType == TDatumType.POINT
                  || dstType == TDatumType.POLYGON || dstType == TDatumType.MULTIPOLYGON
                  || dstType == TDatumType.LINESTRING);
          break;
        case java.sql.Types.OTHER:
          // NOTE: I ignore subtypes (geography vs geopetry vs none) here just because
          // it makes no difference for OmniSciDB at the moment
          Db_vendor_types.GisType gisType =
                  vendor_types.find_gis_type(otherdb_conn, srcColumns, i);
          // Destination scale carries the SRID for geo columns.
          if (gisType.srid != dstScale) {
            match = false;
            break;
          }
          switch (dstType) {
            case POINT:
              match = gisType.type.equalsIgnoreCase("POINT");
              break;
            case LINESTRING:
              match = gisType.type.equalsIgnoreCase("LINESTRING");
              break;
            case POLYGON:
              match = gisType.type.equalsIgnoreCase("POLYGON");
              break;
            case MULTIPOLYGON:
              match = gisType.type.equalsIgnoreCase("MULTIPOLYGON");
              break;
            default:
              LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                      + " not Supported");
              exit(1);
          }
          break;
        default:
          LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                  + " not Supported");
          exit(1);
      }
      if (!match) {
        LOGGER.error("Source and destination types for column "
                + srcColumns.getColumnName(i)
                + " do not match. Please make sure that type, precision and scale are exactly the same");
        exit(1);
      }
    }
  }
686 
687  private void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData) {
688  StringBuilder sb = new StringBuilder();
689  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
690 
691  // Now iterate the metadata
692  try {
693  for (int i = 1; i <= metaData.getColumnCount(); i++) {
694  if (i > 1) {
695  sb.append(",");
696  }
697  LOGGER.debug("Column name is " + metaData.getColumnName(i));
698  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
699  LOGGER.debug("Column type is " + metaData.getColumnType(i));
700 
701  sb.append(metaData.getColumnName(i)).append(" ");
702  int col_type = metaData.getColumnType(i);
703  if (col_type == java.sql.Types.OTHER) {
704  Db_vendor_types.GisType type =
705  vendor_types.find_gis_type(otherdb_conn, metaData, i);
706  sb.append(Db_vendor_types.gis_type_to_str(type));
707  } else {
708  sb.append(getColType(metaData.getColumnType(i),
709  metaData.getPrecision(i),
710  metaData.getScale(i)));
711  }
712  }
713  sb.append(")");
714 
715  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
716  sb.append(" with (fragment_size = ");
717  sb.append(cmd.getOptionValue("fragmentSize", "0"));
718  sb.append(")");
719  }
720 
721  } catch (SQLException ex) {
722  LOGGER.error("Error processing the metadata - " + ex.toString());
723  exit(1);
724  }
725 
726  executeDBCommand(sb.toString());
727  }
728 
  /**
   * Opens the Thrift connection to the HEAVYAI server (binary, http, or https
   * per command-line flags), creates {@code client}, and logs in to populate
   * {@code session}. Terminates the process on any connection failure.
   */
  private void createDBConnection() {
    TTransport transport = null;
    // Placeholder protocol over a null transport; every branch below replaces
    // it before use.
    TProtocol protocol = new TBinaryProtocol(transport);
    int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
    String server = cmd.getOptionValue("server", "localhost");
    try {
      // Uses default certificate stores.
      // NOTE(review): load_trust_store is computed but never read below.
      boolean load_trust_store = cmd.hasOption("https");
      SockTransportProperties skT = null;
      if (cmd.hasOption("https")) {
        // --insecure disables server certificate validation.
        skT = SockTransportProperties.getEncryptedClientDefaultTrustStore(
                !cmd.hasOption("insecure"));
        transport = skT.openHttpsClientTransport(server, port);
        transport.open();
        protocol = new TJSONProtocol(transport);
      } else if (cmd.hasOption("http")) {
        skT = SockTransportProperties.getUnencryptedClient();
        // NOTE(review): no transport.open() here unlike the other branches —
        // presumably openHttpClientTransport returns an opened transport;
        // confirm against SockTransportProperties.
        transport = skT.openHttpClientTransport(server, port);
        protocol = new TJSONProtocol(transport);
      } else {
        skT = SockTransportProperties.getUnencryptedClient();
        transport = skT.openClientTransport(server, port);
        transport.open();
        protocol = new TBinaryProtocol(transport);
      }

      client = new Heavy.Client(protocol);
      // This if will be useless until PKI signon
      if (cmd.hasOption("user")) {
        session = client.connect(cmd.getOptionValue("user", "admin"),
                cmd.getOptionValue("passwd", "HyperInteractive"),
                cmd.getOptionValue("database", "omnisci"));
      }
      LOGGER.debug("Connected session is " + session);

    } catch (TTransportException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(1);
    } catch (TDBException ex) {
      LOGGER.error("Connection failed - " + ex.getError_msg());
      exit(2);
    } catch (TException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(3);
    } catch (Exception ex) {
      LOGGER.error("General exception - " + ex.toString());
      exit(4);
    }
  }
778 
779  private List<TColumnType> getColumnInfo(String tName) {
780  LOGGER.debug("Getting columns for " + tName);
781  List<TColumnType> row_descriptor = null;
782  try {
783  TTableDetails table_details = client.get_table_details(session, tName);
784  row_descriptor = table_details.row_desc;
785  } catch (TDBException ex) {
786  LOGGER.error("column check failed - " + ex.getError_msg());
787  exit(3);
788  } catch (TException ex) {
789  LOGGER.error("column check failed - " + ex.toString());
790  exit(3);
791  }
792  return row_descriptor;
793  }
794 
795  private boolean tableExists(String tName) {
796  LOGGER.debug("Check for table " + tName);
797  try {
798  List<String> recv_get_tables = client.get_tables(session);
799  for (String s : recv_get_tables) {
800  if (s.equals(tName)) {
801  return true;
802  }
803  }
804  } catch (TDBException ex) {
805  LOGGER.error("Table check failed - " + ex.getError_msg());
806  exit(3);
807  } catch (TException ex) {
808  LOGGER.error("Table check failed - " + ex.toString());
809  exit(3);
810  }
811  return false;
812  }
813 
814  private void executeDBCommand(String sql) {
815  LOGGER.info("Run Command - " + sql);
816 
817  try {
818  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
819  } catch (TDBException ex) {
820  LOGGER.error("SQL Execute failed - " + ex.getError_msg());
821  exit(1);
822  } catch (TException ex) {
823  LOGGER.error("SQL Execute failed - " + ex.toString());
824  exit(1);
825  }
826  }
827 
828  private String getColType(int cType, int precision, int scale) {
829  // Note - if cType is OTHER a earlier call will have been made
830  // to try and work out the db vendors specific type.
831  if (precision > 19) {
832  precision = 19;
833  }
834  if (scale > 19) {
835  scale = 18;
836  }
837  switch (cType) {
838  case java.sql.Types.TINYINT:
839  return ("TINYINT");
840  case java.sql.Types.SMALLINT:
841  return ("SMALLINT");
842  case java.sql.Types.INTEGER:
843  return ("INTEGER");
844  case java.sql.Types.BIGINT:
845  return ("BIGINT");
846  case java.sql.Types.FLOAT:
847  return ("FLOAT");
848  case java.sql.Types.DECIMAL:
849  return ("DECIMAL(" + precision + "," + scale + ")");
850  case java.sql.Types.DOUBLE:
851  return ("DOUBLE");
852  case java.sql.Types.REAL:
853  return ("REAL");
854  case java.sql.Types.NUMERIC:
855  return ("NUMERIC(" + precision + "," + scale + ")");
856  case java.sql.Types.TIME:
857  return ("TIME");
858  case java.sql.Types.TIMESTAMP:
859  return ("TIMESTAMP");
860  case java.sql.Types.DATE:
861  return ("DATE");
862  case java.sql.Types.BOOLEAN:
863  case java.sql.Types
864  .BIT: // deal with postgress treating boolean as bit... this will bite me
865  return ("BOOLEAN");
866  case java.sql.Types.NVARCHAR:
867  case java.sql.Types.VARCHAR:
868  case java.sql.Types.NCHAR:
869  case java.sql.Types.CHAR:
870  case java.sql.Types.LONGVARCHAR:
871  case java.sql.Types.LONGNVARCHAR:
872  return ("TEXT ENCODING DICT");
873  default:
874  throw new AssertionError("Column type " + cType + " not Supported");
875  }
876  }
877 
  /**
   * Allocates an empty TColumn buffer for source column {@code i}, choosing
   * the backing list (int_col / real_col / str_col) by the column's JDBC type.
   * Temporal, boolean, and decimal values are all staged as longs; GIS
   * (OTHER) columns are staged as WKT strings.
   *
   * @throws AssertionError for unsupported JDBC types
   */
  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
          throws SQLException {
    TColumn col = new TColumn();

    // Presize to the flush threshold to avoid list growth during a batch.
    col.nulls = new ArrayList<Boolean>(bufferSize);

    col.data = new TColumnData();

    switch (md.getColumnType(i)) {
      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
      case java.sql.Types.TIME:
      case java.sql.Types.TIMESTAMP:
      case java.sql.Types
              .BIT: // deal with postgress treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
      case java.sql.Types.DATE:
      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        col.data.int_col = new ArrayList<Long>(bufferSize);
        break;

      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        col.data.real_col = new ArrayList<Double>(bufferSize);
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
      case java.sql.Types.OTHER:
        col.data.str_col = new ArrayList<String>(bufferSize);
        break;

      default:
        throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
    }
    return col;
  }
923 
  /**
   * Reads column {@code colNum} from the current row of {@code rs} and appends
   * it to the columnar buffer {@code col}. NULLs append a placeholder value
   * plus nulls=TRUE; otherwise the converted value plus nulls=FALSE.
   *
   * Conversions: booleans/bits -> 0/1 longs; decimals -> scaled longs;
   * time/timestamp/date -> epoch seconds (truncated from millis); OTHER (GIS)
   * -> WKT via the vendor helper.
   *
   * @param scale decimal scale used to shift DECIMAL/NUMERIC values into longs
   * @throws AssertionError for unsupported JDBC types
   */
  private void setColValue(ResultSet rs,
          TColumn col,
          int columnType,
          int colNum,
          int scale,
          String colTypeName) throws SQLException {
    switch (columnType) {
      case java.sql.Types
              .BIT: // deal with postgress treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
        Boolean b = rs.getBoolean(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(b ? 1L : 0L);
        }
        break;

      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        BigDecimal bd = rs.getBigDecimal(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          // Shift by 10^scale to store the decimal as a long.
          // NOTE(review): pow() returns double — exact for powers of ten up
          // to 1e22, so safe for scale <= 18, but movePointRight(scale)
          // would express the intent without floating point.
          col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
        }
        break;

      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
        Long l = rs.getLong(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          // NOTE(review): new Long(0) is deprecated — Long.valueOf(0) / 0L.
          col.data.int_col.add(new Long(0));
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(l);
        }
        break;

      case java.sql.Types.TIME:
        Time t = rs.getTime(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          // Stored as seconds since epoch (millis truncated).
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
          col.nulls.add(Boolean.FALSE);
        }

        break;
      case java.sql.Types.TIMESTAMP:
        Timestamp ts = rs.getTimestamp(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          // Stored as seconds since epoch — sub-second precision is dropped.
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
          col.nulls.add(Boolean.FALSE);
        }

        break;
      case java.sql.Types.DATE:
        Date d = rs.getDate(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
          col.nulls.add(Boolean.FALSE);
        }
        break;
      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        Double db = rs.getDouble(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          // NOTE(review): new Double(0) is deprecated — Double.valueOf(0).
          col.data.real_col.add(new Double(0));

        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.real_col.add(db);
        }
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        String strVal = rs.getString(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");

        } else {
          col.data.str_col.add(strVal);
          col.nulls.add(Boolean.FALSE);
        }
        break;
      case java.sql.Types.OTHER:
        // GIS column: convert the vendor-specific object to WKT text.
        Object objVal = rs.getObject(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");
        } else {
          col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
          col.nulls.add(Boolean.FALSE);
        }
        break;
      default:
        throw new AssertionError("Column type " + columnType + " not Supported");
    }
  }
1049 
1050  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
1051  throws SQLException {
1052  col.nulls.clear();
1053 
1054  switch (md.getColumnType(i)) {
1055  case java.sql.Types.TINYINT:
1056  case java.sql.Types.SMALLINT:
1057  case java.sql.Types.INTEGER:
1058  case java.sql.Types.BIGINT:
1059  case java.sql.Types.TIME:
1060  case java.sql.Types.TIMESTAMP:
1061  case java.sql.Types
1062  .BIT: // deal with postgress treating boolean as bit... this will bite me
1063  case java.sql.Types.BOOLEAN:
1064  case java.sql.Types.DATE:
1065  case java.sql.Types.DECIMAL:
1066  case java.sql.Types.NUMERIC:
1067  col.data.int_col.clear();
1068  break;
1069 
1070  case java.sql.Types.FLOAT:
1071  case java.sql.Types.DOUBLE:
1072  case java.sql.Types.REAL:
1073  col.data.real_col.clear();
1074  break;
1075 
1076  case java.sql.Types.NVARCHAR:
1077  case java.sql.Types.VARCHAR:
1078  case java.sql.Types.NCHAR:
1079  case java.sql.Types.CHAR:
1080  case java.sql.Types.LONGVARCHAR:
1081  case java.sql.Types.LONGNVARCHAR:
1082  case java.sql.Types.OTHER:
1083  col.data.str_col.clear();
1084  break;
1085  default:
1086  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
1087  }
1088  }
1089 }
void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData)
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
size_t append(FILE *f, const size_t size, const int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:168
String getColType(int cType, int precision, int scale)
void executeDBCommand(String sql)
void help(Options options)
static void main(String[] args)
void doWork(String[] args)
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
void verifyColumnSignaturesMatch(Connection otherdb_conn, List< TColumnType > dstColumns, ResultSetMetaData srcColumns)
tuple line
Definition: parse_ast.py:10
static final Logger LOGGER
boolean tableExists(String tName)
long getSecondsFromMilliseconds(long milliseconds)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)