OmniSciDB  a987f07e93
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2022 HEAVY.AI, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
23 
24 import org.apache.commons.cli.*;
25 import org.apache.thrift.TException;
26 import org.apache.thrift.protocol.TBinaryProtocol;
27 import org.apache.thrift.protocol.TJSONProtocol;
28 import org.apache.thrift.protocol.TProtocol;
29 import org.apache.thrift.transport.TSocket;
30 import org.apache.thrift.transport.TTransport;
31 import org.apache.thrift.transport.TTransportException;
32 import org.slf4j.Logger;
33 import org.slf4j.LoggerFactory;
34 
35 import java.io.BufferedReader;
36 import java.io.FileReader;
37 import java.io.IOException;
38 import java.math.BigDecimal;
39 import java.security.KeyStore;
40 import java.sql.*;
41 import java.time.*;
42 import java.util.ArrayList;
43 import java.util.List;
44 
45 import ai.heavy.thrift.server.*;
46 
/**
 * Converts a millisecond timestamp to seconds; implemented with a lambda by
 * SQLImporter (truncating integer division by 1000).
 */
@FunctionalInterface
interface DateTimeUtils {
  /**
   * @param milliseconds epoch time in milliseconds
   * @return epoch time in whole seconds
   */
  long getSecondsFromMilliseconds(long milliseconds);
}
50 
51 class MutuallyExlusiveOptionsException extends ParseException {
52  protected MutuallyExlusiveOptionsException(String message) {
53  super(message);
54  }
55 
56  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
57  StringBuffer sb = new StringBuffer(
58  "Mutually exclusive options used. " + errMsg + ". Options provided [");
59  for (String s : strings) {
60  sb.append(s);
61  sb.append(" ");
62  }
63  sb.setCharAt(sb.length() - 1, ']');
64  return new MutuallyExlusiveOptionsException(sb.toString());
65  }
66 }
67 
69  private Options options = new Options();
70 
71  void printVersion() {
72  System.out.println("SQLImporter Version 4.6.0");
73  }
74 
    // NOTE(review): the enclosing method signature (printHelpMessage, original
    // line 75) was lost in extraction; this is its body. It assembles a usage
    // synopsis string and prints it via commons-cli's HelpFormatter.
    StringBuffer sb = new StringBuffer("\nSQLImporter ");
    // Ready for PKI auth
    // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
    sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
    sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
    // sb.append("([--ca-trust-store <ca trust store file name>]
    // --ca-trust-store-password
    // <trust store password> | --insecure)\n");
    sb.append(
            "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
    sb.append(
            "-su <other database user> -sp <other database user password> -ss <other database sql statement>\n");
    sb.append(
            "-t <HEAVYAI target table> -b <transfer buffer size> -f <table fragment size>\n");
    sb.append("[-tr] [-adtf] [-nprg] -i <init commands file>\n");
    sb.append("\nSQLImporter -h | --help\n\n");

    HelpFormatter formatter = new HelpFormatter();
    // Forces help to print out options in order they were added rather
    // than in alphabetical order
    formatter.setOptionComparator(null);
    int help_width = 100;
    formatter.printHelp(help_width, sb.toString(), "", options, "");
  }
100 
    // NOTE(review): the enclosing constructor/method signature (original line
    // 101) was lost in extraction; this body registers every command-line
    // option the importer understands.
    options.addOption("r", true, "Row Load Limit");

    // HEAVYAI authentication options
    options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
    options.addOption(
            Option.builder("u").hasArg().desc("HEAVYAI User").longOpt("user").build());
    options.addOption(Option.builder("p")
                              .hasArg()
                              .desc("HEAVYAI Password")
                              .longOpt("passwd")
                              .build());
    // HEAVYAI transport options.
    // OptionGroup makes --binary/--http/--https mutually exclusive: commons-cli
    // rejects a command line that supplies more than one of them.
    OptionGroup transport_grp = new OptionGroup();
    transport_grp.addOption(Option.builder()
                                    .desc("use binary transport to connect to HEAVYAI ")
                                    .longOpt("binary")
                                    .build());
    transport_grp.addOption(Option.builder()
                                    .desc("use http transport to connect to HEAVYAI ")
                                    .longOpt("http")
                                    .build());
    transport_grp.addOption(Option.builder()
                                    .desc("use https transport to connect to HEAVYAI ")
                                    .longOpt("https")
                                    .build());
    options.addOptionGroup(transport_grp);

    // HEAVYAI database server details
    options.addOption(Option.builder("s")
                              .hasArg()
                              .desc("HEAVYAI Server")
                              .longOpt("server")
                              .build());
    options.addOption(Option.builder("db")
                              .hasArg()
                              .desc("HEAVYAI Database")
                              .longOpt("database")
                              .build());
    options.addOption(
            Option.builder().hasArg().desc("HEAVYAI Port").longOpt("port").build());

    // HEAVYAI server authentication options
    options.addOption(Option.builder()
                              .hasArg()
                              .desc("CA certificate trust store")
                              .longOpt("ca-trust-store")
                              .build());
    options.addOption(Option.builder()
                              .hasArg()
                              .desc("CA certificate trust store password")
                              .longOpt("ca-trust-store-passwd")
                              .build());
    options.addOption(
            Option.builder()
                    .desc("Insecure TLS - do not validate server HEAVYAI server credentials")
                    .longOpt("insecure")
                    .build());

    // Other (source) database connection details. The connection string,
    // source credentials and the SQL statement are all mandatory.
    options.addOption(Option.builder("d")
                              .hasArg()
                              .desc("JDBC driver class")
                              .longOpt("driver")
                              .build());
    options.addOption(Option.builder("c")
                              .hasArg()
                              .desc("JDBC Connection string")
                              .longOpt("jdbcConnect")
                              .required()
                              .build());
    options.addOption(Option.builder("su")
                              .hasArg()
                              .desc("Source User")
                              .longOpt("sourceUser")
                              .required()
                              .build());
    options.addOption(Option.builder("sp")
                              .hasArg()
                              .desc("Source Password")
                              .longOpt("sourcePasswd")
                              .required()
                              .build());
    options.addOption(Option.builder("ss")
                              .hasArg()
                              .desc("SQL Select statement")
                              .longOpt("sqlStmt")
                              .required()
                              .build());

    options.addOption(Option.builder("t")
                              .hasArg()
                              .desc("HEAVYAI Target Table")
                              .longOpt("targetTable")
                              .required()
                              .build());

    options.addOption(Option.builder("b")
                              .hasArg()
                              .desc("transfer buffer size")
                              .longOpt("bufferSize")
                              .build());
    options.addOption(Option.builder("f")
                              .hasArg()
                              .desc("table fragment size")
                              .longOpt("fragmentSize")
                              .build());

    options.addOption(Option.builder("tr")
                              .desc("Truncate table if it exists")
                              .longOpt("truncate")
                              .build());

    options.addOption(Option.builder("i")
                              .hasArg()
                              .desc("File containing init command for DB")
                              .longOpt("initializeFile")
                              .build());

    options.addOption(
            Option.builder("adtf")
                    .desc("Allow double to float conversion, note precision will be reduced")
                    .longOpt("AllowDoubleToFloat")
                    .build());

    options.addOption(
            Option.builder("ain")
                    .desc("Allow conversion from bigger integer types to smaller. Overflow might occur, "
                            + "use it only when casting is impossible")
                    .longOpt("AllowIntegerNarrowing")
                    .build());

    options.addOption(
            Option.builder("nprg")
                    .desc("Do not assign Render Groups to Polygons (faster import, but not renderable)")
                    .longOpt("noPolyRenderGroups")
                    .build());
  }
239 
240  private Option setOptionRequired(Option option) {
241  option.setRequired(true);
242  return option;
243  }
244 
  /**
   * Parses the command line, layering extra dependency checks on top of
   * commons-cli: help/version short-circuit, user/password pairing, and
   * --insecure being valid only with --https.
   *
   * @param args raw command-line arguments
   * @return the parsed command line
   * @throws ParseException on missing/invalid or mutually exclusive options
   */
  public CommandLine parse(String[] args) throws ParseException {
    CommandLineParser clp = new DefaultParser() {
      public CommandLine parse(Options options, String[] strings) throws ParseException {
        // First pass: parse against a help-only option set so that -h/--help
        // works even when required options are absent.
        Options helpOptions = new Options();
        helpOptions.addOption(
                Option.builder("h").desc("help message").longOpt("help").build());
        try {
          CommandLine cmd = super.parse(helpOptions, strings);
        } catch (UnrecognizedOptionException uE) {
        }
        // NOTE(review): `cmd` above is declared inside the try block yet used
        // below, and at least one line appears to be missing before exit(0)
        // (likely a printHelpMessage() call) -- this span looks corrupted by
        // extraction; confirm against the upstream source before editing.
        if (cmd.hasOption("help")) {
          exit(0);
        }
        if (cmd.hasOption("version")) {
          printVersion();
          exit(0);
        }
        // Second pass: full parse with all registered options.
        cmd = super.parse(options, strings);
        if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
          throw new MissingArgumentException(
                  "Must supply either an HEAVYAI db user or a user certificate");
        }
        // if user supplied must have password and visa versa
        if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
          options.addOption(setOptionRequired(options.getOption("user")));
          options.addOption(setOptionRequired(options.getOption("passwd")));
          super.parse(options, strings);
        }

        // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
        // and
        // visa versa
        // Dead code, deliberately disabled until PKI sign-on lands.
        if (false) {
          if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
            options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
            options.addOption(
                    setOptionRequired(options.getOption("ca-trust-store-password")));
            super.parse(options, strings);
          }
          // NOTE(review): the declaration `MutuallyExlusiveOptionsException meo ='
          // (original line 287) is missing from this extraction; `throw meo`
          // below refers to it.
          if (options.getOption("user").isRequired()
                  && options.getOption("client-key").isRequired()) {
            MutuallyExlusiveOptionsException.create(
                    "user/password can not be use with client-cert/client-key",
                    strings);
            throw meo;
          }

          if (cmd.hasOption("http")
                  || cmd.hasOption("binary")
                          && (cmd.hasOption("client-cert")
                                  || cmd.hasOption("client-key"))) {
            MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                    "http|binary can not be use with ca-cert|client-cert|client-key",
                    strings);
          }
        }

        // --insecure only makes sense when TLS (https) is in use.
        if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
          MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                  "insecure can only be use with https", strings);
          throw meo;
        }

        return cmd;
      }

      // Unused overload required by the CommandLineParser interface shape;
      // intentionally returns null.
      public CommandLine parse(Options options, String[] strings, boolean b)
              throws ParseException {
        return null;
      }
    };
    return clp.parse(options, args);
  }
320 }
321 
322 public class SQLImporter {
323  protected String session = null;
324  protected Heavy.Client client = null;
325  private CommandLine cmd = null;
326  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
327  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
328  return milliseconds / 1000;
329  };
330 
332 
333  public static void main(String[] args) {
334  SQLImporter sq = new SQLImporter();
335  sq.doWork(args);
336  }
337 
338  void doWork(String[] args) {
339  // create Options object
340 
341  SQLImporter_args s_args = new SQLImporter_args();
342 
343  try {
344  cmd = s_args.parse(args);
345  } catch (ParseException ex) {
346  LOGGER.error(ex.getLocalizedMessage());
347  s_args.printHelpMessage();
348  exit(0);
349  }
350  executeQuery();
351  }
352 
  /**
   * Core import loop: connects to the source database via JDBC, runs the
   * user's SELECT, creates/validates the HEAVY.AI target table from the
   * result-set metadata, and streams rows to the server in columnar batches
   * of {@code bufferSize}. Closes JDBC resources and the HEAVY.AI session in
   * the finally block.
   */
  void executeQuery() {
    Connection conn = null;
    Statement stmt = null;

    long totalTime = 0;

    try {
      // Open a connection
      LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
      conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
              cmd.getOptionValue("sourceUser"),
              cmd.getOptionValue("sourcePasswd"));
      // Vendor helper resolves DB-specific behavior (e.g. geometry types).
      vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
      long startTime = System.currentTimeMillis();

      // run init file script on targe DB if present
      if (cmd.hasOption("initializeFile")) {
        run_init(conn);
      }

      // NOTE(review): this try block is corrupted in the extraction -- a line
      // (original line 374, likely an `if (!conn.getAutoCommit()) {` guard) is
      // missing, leaving an unbalanced brace below. Restore from upstream.
      try {
        conn.setAutoCommit(false);
        }
      } catch (SQLException se) {
        LOGGER.warn(
                "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
                + se.toString());
      }

      // Execute a query
      stmt = conn.createStatement();

      int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
      // set the jdbc fetch buffer size to reduce the amount of records being moved to
      // java from postgress
      stmt.setFetchSize(bufferSize);
      long timer;

      ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));

      // check if table already exists and is compatible in HEAVYAI with the query
      // metadata
      ResultSetMetaData md = rs.getMetaData();
      checkDBTable(conn, md);

      timer = System.currentTimeMillis();

      long resultCount = 0;
      int bufferCount = 0;
      long total = 0;

      // One TColumn buffer per result column (raw ArrayList kept as-is).
      List<TColumn> cols = new ArrayList(md.getColumnCount());
      for (int i = 1; i <= md.getColumnCount(); i++) {
        TColumn col = setupBinaryColumn(i, md, bufferSize);
        cols.add(col);
      }

      boolean assignRenderGroups = !cmd.hasOption("noPolyRenderGroups");

      // read data from old DB
      while (rs.next()) {
        for (int i = 1; i <= md.getColumnCount(); i++) {
          setColValue(rs,
                  cols.get(i - 1),
                  md.getColumnType(i),
                  i,
                  md.getScale(i),
                  md.getColumnTypeName(i));
        }
        resultCount++;
        bufferCount++;
        // Flush a full batch to the server, then reset the column buffers.
        if (bufferCount == bufferSize) {
          bufferCount = 0;
          // send the buffer to HEAVY.AI
          if (assignRenderGroups) {
            client.load_table_binary_columnar_polys(
                    session, cmd.getOptionValue("targetTable"), cols, null, true);
          } else {
            client.load_table_binary_columnar(
                    session, cmd.getOptionValue("targetTable"), cols, null);
          }
          // recreate columnar store for use
          for (int i = 1; i <= md.getColumnCount(); i++) {
            resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
          }

          if (resultCount % 100000 == 0) {
            LOGGER.info("Imported " + resultCount + " records");
          }
        }
      }
      if (bufferCount > 0) {
        // send the LAST buffer to HEAVY.AI
        if (assignRenderGroups) {
          client.load_table_binary_columnar_polys(
                  session, cmd.getOptionValue("targetTable"), cols, null, true);
        } else {
          client.load_table_binary_columnar(
                  session, cmd.getOptionValue("targetTable"), cols, null);
        }
        bufferCount = 0;
      }

      // dump render group assignment data immediately
      if (assignRenderGroups) {
        client.load_table_binary_columnar_polys(
                session, cmd.getOptionValue("targetTable"), null, null, false);
      }

      LOGGER.info("result set count is " + resultCount + " read time is "
              + (System.currentTimeMillis() - timer) + "ms");

      // Clean-up environment
      rs.close();
      stmt.close();
      conn.close();

      totalTime = System.currentTimeMillis() - startTime;
    } catch (SQLException se) {
      LOGGER.error("SQLException - " + se.toString());
      se.printStackTrace();
    } catch (TDBException ex) {
      LOGGER.error("TDBException - " + ex.getError_msg());
      ex.printStackTrace();
    } catch (TException ex) {
      LOGGER.error("TException failed - " + ex.toString());
      ex.printStackTrace();
    } finally {
      // finally block used to close resources
      try {
        if (stmt != null) {
          stmt.close();
        }
      } catch (SQLException se2) {
      } // nothing we can do
      try {
        if (conn != null) {
          conn.close();
        }
      } catch (SQLException se) {
        LOGGER.error("SQlException in close - " + se.toString());
        se.printStackTrace();
      }
      try {
        if (session != null) {
          client.disconnect(session);
        }
      } catch (TDBException ex) {
        LOGGER.error("TDBException - in finalization " + ex.getError_msg());
        ex.printStackTrace();
      } catch (TException ex) {
        LOGGER.error("TException - in finalization" + ex.toString());
        ex.printStackTrace();
      }
    }
  }
510 
511  private void run_init(Connection conn) {
512  // attempt to open file
513  String line = "";
514  try {
515  BufferedReader reader =
516  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
517  Statement stmt = conn.createStatement();
518  while ((line = reader.readLine()) != null) {
519  if (line.isEmpty()) {
520  continue;
521  }
522  LOGGER.info("Running : " + line);
523  stmt.execute(line);
524  }
525  stmt.close();
526  reader.close();
527  } catch (IOException e) {
528  LOGGER.error("Exception occurred trying to read initialize file: "
529  + cmd.getOptionValue("initFile"));
530  exit(1);
531  } catch (SQLException e) {
532  LOGGER.error(
533  "Exception occurred trying to execute initialize file entry : " + line);
534  exit(1);
535  }
536  }
537 
538  private void help(Options options) {
539  // automatically generate the help statement
540  HelpFormatter formatter = new HelpFormatter();
541  formatter.setOptionComparator(null); // get options in the order they are created
542  formatter.printHelp("SQLImporter", options);
543  }
544 
  /**
   * Ensures the HEAVY.AI target table exists and matches the source metadata.
   * If it exists and --truncate was given, the table is DROPPED and recreated
   * (not SQL-truncated, despite the option's name); otherwise its column
   * signatures are verified against the source result set.
   *
   * @param otherdb_conn source-database connection (used for geometry lookups)
   * @param md           metadata of the source result set
   * @throws SQLException if source metadata cannot be read
   */
  private void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
          throws SQLException {
    String tName = cmd.getOptionValue("targetTable");

    if (tableExists(tName)) {
      // check if we want to truncate
      if (cmd.hasOption("truncate")) {
        executeDBCommand("Drop table " + tName);
        createDBTable(otherdb_conn, md);
      } else {
        List<TColumnType> columnInfo = getColumnInfo(tName);
        verifyColumnSignaturesMatch(otherdb_conn, columnInfo, md);
      }
    } else {
      createDBTable(otherdb_conn, md);
    }
  }
563 
  /**
   * Verifies that every destination (HEAVY.AI) column can accept the
   * corresponding source column: same column count, matching names in the
   * same order (case-insensitive), and compatible types. Integral and float
   * cases deliberately fall through so a smaller source type also matches any
   * larger destination type. Exits the process on the first mismatch.
   *
   * @param otherdb_conn source connection, used to resolve geometry (OTHER) types
   * @param dstColumns   HEAVY.AI table column descriptors
   * @param srcColumns   source result-set metadata
   * @throws SQLException if source metadata cannot be read
   */
  private void verifyColumnSignaturesMatch(Connection otherdb_conn,
          List<TColumnType> dstColumns,
          ResultSetMetaData srcColumns) throws SQLException {
    if (srcColumns.getColumnCount() != dstColumns.size()) {
      LOGGER.error("Table sizes do not match: Destination " + dstColumns.size()
              + " versus Source " + srcColumns.getColumnCount());
      exit(1);
    }
    for (int i = 1; i <= dstColumns.size(); ++i) {
      if (!dstColumns.get(i - 1).getCol_name().equalsIgnoreCase(
                  srcColumns.getColumnName(i))) {
        LOGGER.error(
                "Destination table does not have matching column in same order for column number "
                + i + " destination column name is " + dstColumns.get(i - 1).col_name
                + " versus target column " + srcColumns.getColumnName(i));
        exit(1);
      }
      TDatumType dstType = dstColumns.get(i - 1).getCol_type().getType();
      int dstPrecision = dstColumns.get(i - 1).getCol_type().getPrecision();
      int dstScale = dstColumns.get(i - 1).getCol_type().getScale();
      int srcType = srcColumns.getColumnType(i);
      int srcPrecision = srcColumns.getPrecision(i);
      int srcScale = srcColumns.getScale(i);

      boolean match = false;
      switch (srcType) {
        case java.sql.Types.TINYINT:
          match |= dstType == TDatumType.TINYINT;
          // NOTE: it's okay to import smaller type to a bigger one,
          // so we just fall through and try to match the next type.
          // But the order of case statements is important here!
        case java.sql.Types.SMALLINT:
          match |= dstType == TDatumType.SMALLINT;
          // intentional fall-through (see note above)
        case java.sql.Types.INTEGER:
          match |= dstType == TDatumType.INT;
          // intentional fall-through (see note above)
        case java.sql.Types.BIGINT:
          match |= dstType == TDatumType.BIGINT;
          if (cmd.hasOption("AllowIntegerNarrowing")) {
            match |= dstType == TDatumType.TINYINT || dstType == TDatumType.SMALLINT
                    || dstType == TDatumType.INT;
          }
          break;
        case java.sql.Types.DECIMAL:
        case java.sql.Types.NUMERIC:
          // DECIMAL requires exact precision/scale agreement.
          match = dstType == TDatumType.DECIMAL && dstPrecision == srcPrecision
                  && dstScale == srcScale;
          break;
        case java.sql.Types.FLOAT:
        case java.sql.Types.REAL:
          match |= dstType == TDatumType.FLOAT;
          // Fall through and try double
        case java.sql.Types.DOUBLE:
          match |= dstType == TDatumType.DOUBLE;
          if (cmd.hasOption("AllowDoubleToFloat")) {
            match |= dstType == TDatumType.FLOAT;
          }
          break;
        case java.sql.Types.TIME:
          match = dstType == TDatumType.TIME;
          break;
        case java.sql.Types.TIMESTAMP:
          match = dstType == TDatumType.TIMESTAMP;
          break;
        case java.sql.Types.DATE:
          match = dstType == TDatumType.DATE;
          break;
        case java.sql.Types.BOOLEAN:
        case java.sql.Types
                .BIT: // deal with postgres treating boolean as bit... this will bite me
          match = dstType == TDatumType.BOOL;
          break;
        case java.sql.Types.NVARCHAR:
        case java.sql.Types.VARCHAR:
        case java.sql.Types.NCHAR:
        case java.sql.Types.CHAR:
        case java.sql.Types.LONGVARCHAR:
        case java.sql.Types.LONGNVARCHAR:
          // Text sources may also feed geometry destinations (WKT strings).
          match = (dstType == TDatumType.STR || dstType == TDatumType.POINT
                  || dstType == TDatumType.POLYGON || dstType == TDatumType.MULTIPOLYGON
                  || dstType == TDatumType.LINESTRING
                  || dstType == TDatumType.MULTILINESTRING
                  || dstType == TDatumType.MULTIPOINT);
          break;
        case java.sql.Types.OTHER:
          // NOTE: I ignore subtypes (geography vs geopetry vs none) here just because
          // it makes no difference for OmniSciDB at the moment
          Db_vendor_types.GisType gisType =
                  vendor_types.find_gis_type(otherdb_conn, srcColumns, i);
          // SRID is carried in the destination's scale field.
          if (gisType.srid != dstScale) {
            match = false;
            break;
          }
          switch (dstType) {
            case POINT:
              match = gisType.type.equalsIgnoreCase("POINT");
              break;
            case MULTIPOINT:
              match = gisType.type.equalsIgnoreCase("MULTIPOINT");
              break;
            case LINESTRING:
              match = gisType.type.equalsIgnoreCase("LINESTRING");
              break;
            case MULTILINESTRING:
              match = gisType.type.equalsIgnoreCase("MULTILINESTRING");
              break;
            case POLYGON:
              match = gisType.type.equalsIgnoreCase("POLYGON");
              break;
            case MULTIPOLYGON:
              match = gisType.type.equalsIgnoreCase("MULTIPOLYGON");
              break;
            default:
              LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                      + " not Supported");
              exit(1);
          }
          break;
        default:
          LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                  + " not Supported");
          exit(1);
      }
      if (!match) {
        LOGGER.error("Source and destination types for column "
                + srcColumns.getColumnName(i)
                + " do not match. Please make sure that type, precision and scale are exactly the same");
        exit(1);
      }
    }
  }
694 
695  private void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData) {
696  StringBuilder sb = new StringBuilder();
697  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
698 
699  // Now iterate the metadata
700  try {
701  for (int i = 1; i <= metaData.getColumnCount(); i++) {
702  if (i > 1) {
703  sb.append(",");
704  }
705  LOGGER.debug("Column name is " + metaData.getColumnName(i));
706  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
707  LOGGER.debug("Column type is " + metaData.getColumnType(i));
708 
709  sb.append(metaData.getColumnName(i)).append(" ");
710  int col_type = metaData.getColumnType(i);
711  if (col_type == java.sql.Types.OTHER) {
712  Db_vendor_types.GisType type =
713  vendor_types.find_gis_type(otherdb_conn, metaData, i);
714  sb.append(Db_vendor_types.gis_type_to_str(type));
715  } else {
716  sb.append(getColType(metaData.getColumnType(i),
717  metaData.getPrecision(i),
718  metaData.getScale(i)));
719  }
720  }
721  sb.append(")");
722 
723  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
724  sb.append(" with (fragment_size = ");
725  sb.append(cmd.getOptionValue("fragmentSize", "0"));
726  sb.append(")");
727  }
728 
729  } catch (SQLException ex) {
730  LOGGER.error("Error processing the metadata - " + ex.toString());
731  exit(1);
732  }
733 
734  executeDBCommand(sb.toString());
735  }
736 
737  private void createDBConnection() {
738  TTransport transport = null;
739  TProtocol protocol = new TBinaryProtocol(transport);
740  int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
741  String server = cmd.getOptionValue("server", "localhost");
742  try {
743  // Uses default certificate stores.
744  boolean load_trust_store = cmd.hasOption("https");
745  SockTransportProperties skT = null;
746  if (cmd.hasOption("https")) {
747  skT = SockTransportProperties.getEncryptedClientDefaultTrustStore(
748  !cmd.hasOption("insecure"));
749  transport = skT.openHttpsClientTransport(server, port);
750  transport.open();
751  protocol = new TJSONProtocol(transport);
752  } else if (cmd.hasOption("http")) {
753  skT = SockTransportProperties.getUnencryptedClient();
754  transport = skT.openHttpClientTransport(server, port);
755  protocol = new TJSONProtocol(transport);
756  } else {
757  skT = SockTransportProperties.getUnencryptedClient();
758  transport = skT.openClientTransport(server, port);
759  transport.open();
760  protocol = new TBinaryProtocol(transport);
761  }
762 
763  client = new Heavy.Client(protocol);
764  // This if will be useless until PKI signon
765  if (cmd.hasOption("user")) {
766  session = client.connect(cmd.getOptionValue("user", "admin"),
767  cmd.getOptionValue("passwd", "HyperInteractive"),
768  cmd.getOptionValue("database", "omnisci"));
769  }
770  LOGGER.debug("Connected session is " + session);
771 
772  } catch (TTransportException ex) {
773  LOGGER.error("Connection failed - " + ex.toString());
774  exit(1);
775  } catch (TDBException ex) {
776  LOGGER.error("Connection failed - " + ex.getError_msg());
777  exit(2);
778  } catch (TException ex) {
779  LOGGER.error("Connection failed - " + ex.toString());
780  exit(3);
781  } catch (Exception ex) {
782  LOGGER.error("General exception - " + ex.toString());
783  exit(4);
784  }
785  }
786 
787  private List<TColumnType> getColumnInfo(String tName) {
788  LOGGER.debug("Getting columns for " + tName);
789  List<TColumnType> row_descriptor = null;
790  try {
791  TTableDetails table_details = client.get_table_details(session, tName);
792  row_descriptor = table_details.row_desc;
793  } catch (TDBException ex) {
794  LOGGER.error("column check failed - " + ex.getError_msg());
795  exit(3);
796  } catch (TException ex) {
797  LOGGER.error("column check failed - " + ex.toString());
798  exit(3);
799  }
800  return row_descriptor;
801  }
802 
803  private boolean tableExists(String tName) {
804  LOGGER.debug("Check for table " + tName);
805  try {
806  List<String> recv_get_tables = client.get_tables(session);
807  for (String s : recv_get_tables) {
808  if (s.equals(tName)) {
809  return true;
810  }
811  }
812  } catch (TDBException ex) {
813  LOGGER.error("Table check failed - " + ex.getError_msg());
814  exit(3);
815  } catch (TException ex) {
816  LOGGER.error("Table check failed - " + ex.toString());
817  exit(3);
818  }
819  return false;
820  }
821 
822  private void executeDBCommand(String sql) {
823  LOGGER.info("Run Command - " + sql);
824 
825  try {
826  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
827  } catch (TDBException ex) {
828  LOGGER.error("SQL Execute failed - " + ex.getError_msg());
829  exit(1);
830  } catch (TException ex) {
831  LOGGER.error("SQL Execute failed - " + ex.toString());
832  exit(1);
833  }
834  }
835 
836  private String getColType(int cType, int precision, int scale) {
837  // Note - if cType is OTHER a earlier call will have been made
838  // to try and work out the db vendors specific type.
839  if (precision > 19) {
840  precision = 19;
841  }
842  if (scale > 19) {
843  scale = 18;
844  }
845  switch (cType) {
846  case java.sql.Types.TINYINT:
847  return ("TINYINT");
848  case java.sql.Types.SMALLINT:
849  return ("SMALLINT");
850  case java.sql.Types.INTEGER:
851  return ("INTEGER");
852  case java.sql.Types.BIGINT:
853  return ("BIGINT");
854  case java.sql.Types.FLOAT:
855  return ("FLOAT");
856  case java.sql.Types.DECIMAL:
857  return ("DECIMAL(" + precision + "," + scale + ")");
858  case java.sql.Types.DOUBLE:
859  return ("DOUBLE");
860  case java.sql.Types.REAL:
861  return ("REAL");
862  case java.sql.Types.NUMERIC:
863  return ("NUMERIC(" + precision + "," + scale + ")");
864  case java.sql.Types.TIME:
865  return ("TIME");
866  case java.sql.Types.TIMESTAMP:
867  return ("TIMESTAMP");
868  case java.sql.Types.DATE:
869  return ("DATE");
870  case java.sql.Types.BOOLEAN:
871  case java.sql.Types
872  .BIT: // deal with postgress treating boolean as bit... this will bite me
873  return ("BOOLEAN");
874  case java.sql.Types.NVARCHAR:
875  case java.sql.Types.VARCHAR:
876  case java.sql.Types.NCHAR:
877  case java.sql.Types.CHAR:
878  case java.sql.Types.LONGVARCHAR:
879  case java.sql.Types.LONGNVARCHAR:
880  return ("TEXT ENCODING DICT");
881  default:
882  throw new AssertionError("Column type " + cType + " not Supported");
883  }
884  }
885 
886  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
887  throws SQLException {
888  TColumn col = new TColumn();
889 
890  col.nulls = new ArrayList<Boolean>(bufferSize);
891 
892  col.data = new TColumnData();
893 
894  switch (md.getColumnType(i)) {
895  case java.sql.Types.TINYINT:
896  case java.sql.Types.SMALLINT:
897  case java.sql.Types.INTEGER:
898  case java.sql.Types.BIGINT:
899  case java.sql.Types.TIME:
900  case java.sql.Types.TIMESTAMP:
901  case java.sql.Types
902  .BIT: // deal with postgress treating boolean as bit... this will bite me
903  case java.sql.Types.BOOLEAN:
904  case java.sql.Types.DATE:
905  case java.sql.Types.DECIMAL:
906  case java.sql.Types.NUMERIC:
907  col.data.int_col = new ArrayList<Long>(bufferSize);
908  break;
909 
910  case java.sql.Types.FLOAT:
911  case java.sql.Types.DOUBLE:
912  case java.sql.Types.REAL:
913  col.data.real_col = new ArrayList<Double>(bufferSize);
914  break;
915 
916  case java.sql.Types.NVARCHAR:
917  case java.sql.Types.VARCHAR:
918  case java.sql.Types.NCHAR:
919  case java.sql.Types.CHAR:
920  case java.sql.Types.LONGVARCHAR:
921  case java.sql.Types.LONGNVARCHAR:
922  case java.sql.Types.OTHER:
923  col.data.str_col = new ArrayList<String>(bufferSize);
924  break;
925 
926  default:
927  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
928  }
929  return col;
930  }
931 
932  private void setColValue(ResultSet rs,
933  TColumn col,
934  int columnType,
935  int colNum,
936  int scale,
937  String colTypeName) throws SQLException {
938  switch (columnType) {
939  case java.sql.Types
940  .BIT: // deal with postgress treating boolean as bit... this will bite me
941  case java.sql.Types.BOOLEAN:
942  Boolean b = rs.getBoolean(colNum);
943  if (rs.wasNull()) {
944  col.nulls.add(Boolean.TRUE);
945  col.data.int_col.add(0L);
946  } else {
947  col.nulls.add(Boolean.FALSE);
948  col.data.int_col.add(b ? 1L : 0L);
949  }
950  break;
951 
952  case java.sql.Types.DECIMAL:
953  case java.sql.Types.NUMERIC:
954  BigDecimal bd = rs.getBigDecimal(colNum);
955  if (rs.wasNull()) {
956  col.nulls.add(Boolean.TRUE);
957  col.data.int_col.add(0L);
958  } else {
959  col.nulls.add(Boolean.FALSE);
960  col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
961  }
962  break;
963 
964  case java.sql.Types.TINYINT:
965  case java.sql.Types.SMALLINT:
966  case java.sql.Types.INTEGER:
967  case java.sql.Types.BIGINT:
968  Long l = rs.getLong(colNum);
969  if (rs.wasNull()) {
970  col.nulls.add(Boolean.TRUE);
971  col.data.int_col.add(new Long(0));
972  } else {
973  col.nulls.add(Boolean.FALSE);
974  col.data.int_col.add(l);
975  }
976  break;
977 
978  case java.sql.Types.TIME:
979  Time t = rs.getTime(colNum);
980  if (rs.wasNull()) {
981  col.nulls.add(Boolean.TRUE);
982  col.data.int_col.add(0L);
983 
984  } else {
985  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
986  col.nulls.add(Boolean.FALSE);
987  }
988 
989  break;
990  case java.sql.Types.TIMESTAMP:
991  Timestamp ts = rs.getTimestamp(colNum);
992  if (rs.wasNull()) {
993  col.nulls.add(Boolean.TRUE);
994  col.data.int_col.add(0L);
995 
996  } else {
997  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
998  col.nulls.add(Boolean.FALSE);
999  }
1000 
1001  break;
1002  case java.sql.Types.DATE:
1003  Date d = rs.getDate(colNum);
1004  if (rs.wasNull()) {
1005  col.nulls.add(Boolean.TRUE);
1006  col.data.int_col.add(0L);
1007 
1008  } else {
1009  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
1010  col.nulls.add(Boolean.FALSE);
1011  }
1012  break;
1013  case java.sql.Types.FLOAT:
1014  case java.sql.Types.DOUBLE:
1015  case java.sql.Types.REAL:
1016  Double db = rs.getDouble(colNum);
1017  if (rs.wasNull()) {
1018  col.nulls.add(Boolean.TRUE);
1019  col.data.real_col.add(new Double(0));
1020 
1021  } else {
1022  col.nulls.add(Boolean.FALSE);
1023  col.data.real_col.add(db);
1024  }
1025  break;
1026 
1027  case java.sql.Types.NVARCHAR:
1028  case java.sql.Types.VARCHAR:
1029  case java.sql.Types.NCHAR:
1030  case java.sql.Types.CHAR:
1031  case java.sql.Types.LONGVARCHAR:
1032  case java.sql.Types.LONGNVARCHAR:
1033  String strVal = rs.getString(colNum);
1034  if (rs.wasNull()) {
1035  col.nulls.add(Boolean.TRUE);
1036  col.data.str_col.add("");
1037 
1038  } else {
1039  col.data.str_col.add(strVal);
1040  col.nulls.add(Boolean.FALSE);
1041  }
1042  break;
1043  case java.sql.Types.OTHER:
1044  Object objVal = rs.getObject(colNum);
1045  if (rs.wasNull()) {
1046  col.nulls.add(Boolean.TRUE);
1047  col.data.str_col.add("");
1048  } else {
1049  col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
1050  col.nulls.add(Boolean.FALSE);
1051  }
1052  break;
1053  default:
1054  throw new AssertionError("Column type " + columnType + " not Supported");
1055  }
1056  }
1057 
1058  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
1059  throws SQLException {
1060  col.nulls.clear();
1061 
1062  switch (md.getColumnType(i)) {
1063  case java.sql.Types.TINYINT:
1064  case java.sql.Types.SMALLINT:
1065  case java.sql.Types.INTEGER:
1066  case java.sql.Types.BIGINT:
1067  case java.sql.Types.TIME:
1068  case java.sql.Types.TIMESTAMP:
1069  case java.sql.Types
1070  .BIT: // deal with postgress treating boolean as bit... this will bite me
1071  case java.sql.Types.BOOLEAN:
1072  case java.sql.Types.DATE:
1073  case java.sql.Types.DECIMAL:
1074  case java.sql.Types.NUMERIC:
1075  col.data.int_col.clear();
1076  break;
1077 
1078  case java.sql.Types.FLOAT:
1079  case java.sql.Types.DOUBLE:
1080  case java.sql.Types.REAL:
1081  col.data.real_col.clear();
1082  break;
1083 
1084  case java.sql.Types.NVARCHAR:
1085  case java.sql.Types.VARCHAR:
1086  case java.sql.Types.NCHAR:
1087  case java.sql.Types.CHAR:
1088  case java.sql.Types.LONGVARCHAR:
1089  case java.sql.Types.LONGNVARCHAR:
1090  case java.sql.Types.OTHER:
1091  col.data.str_col.clear();
1092  break;
1093  default:
1094  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
1095  }
1096  }
1097 }
void createDBTable(Connection otherdb_conn, ResultSetMetaData metaData)
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
size_t append(FILE *f, const size_t size, const int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:168
String getColType(int cType, int precision, int scale)
void executeDBCommand(String sql)
void help(Options options)
static void main(String[] args)
void doWork(String[] args)
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
void checkDBTable(Connection otherdb_conn, ResultSetMetaData md)
void verifyColumnSignaturesMatch(Connection otherdb_conn, List< TColumnType > dstColumns, ResultSetMetaData srcColumns)
tuple line
Definition: parse_ast.py:10
static final Logger LOGGER
boolean tableExists(String tName)
long getSecondsFromMilliseconds(long milliseconds)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)