OmniSciDB  06b3bd477c
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2017 MapD Technologies, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
23 import com.omnisci.thrift.server.*;
24 
25 import org.apache.commons.cli.*;
26 import org.apache.thrift.TException;
27 import org.apache.thrift.protocol.TBinaryProtocol;
28 import org.apache.thrift.protocol.TJSONProtocol;
29 import org.apache.thrift.protocol.TProtocol;
30 import org.apache.thrift.transport.TSocket;
31 import org.apache.thrift.transport.TTransport;
32 import org.apache.thrift.transport.TTransportException;
33 import org.slf4j.Logger;
34 import org.slf4j.LoggerFactory;
35 
36 import java.io.BufferedReader;
37 import java.io.FileReader;
38 import java.io.IOException;
39 import java.math.BigDecimal;
40 import java.security.KeyStore;
41 import java.sql.*;
42 import java.time.*;
43 import java.util.ArrayList;
44 import java.util.List;
45 
/**
 * Millisecond-to-second conversion hook. The importer stores JDBC
 * TIME/TIMESTAMP/DATE values as epoch seconds (see setColValue), and the
 * default implementation in SQLImporter simply divides by 1000.
 */
interface DateTimeUtils {
  // Returns the number of whole seconds in {@code milliseconds}
  // (truncating integer division in the default lambda).
  long getSecondsFromMilliseconds(long milliseconds);
}
49 
50 class MutuallyExlusiveOptionsException extends ParseException {
51  protected MutuallyExlusiveOptionsException(String message) {
52  super(message);
53  }
54 
55  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
56  StringBuffer sb = new StringBuffer(
57  "Mutually exclusive options used. " + errMsg + ". Options provided [");
58  for (String s : strings) {
59  sb.append(s);
60  sb.append(" ");
61  }
62  sb.setCharAt(sb.length() - 1, ']');
63  return new MutuallyExlusiveOptionsException(sb.toString());
64  }
65 }
  // NOTE(review): the enclosing class declaration (SQLImporter_args, per its
  // use in SQLImporter.doWork) was lost in extraction just above this point.

  // Option definitions shared by the constructor, parse() and the help text.
  private Options options = new Options();

  // Prints the tool version banner to stdout.
  void printVersion() {
    System.out.println("SQLImporter Version 4.6.0");
  }
72 
  // NOTE(review): the method declaration line was lost in extraction here;
  // callers invoke this as printHelpMessage() (see SQLImporter.doWork). It
  // assembles the usage banner and prints it via commons-cli's HelpFormatter.
  StringBuffer sb = new StringBuffer("\nSQLImporter ");
  // Ready for PKI auth
  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
  // sb.append("([--ca-trust-store <ca trust store file name>]
  // --ca-trust-store-password
  // <trust store password> | --insecure)\n");
  sb.append(
      "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
  // NOTE(review): the second "-su ... sql statement" below is almost certainly
  // meant to read "-ss" (see the "ss"/"sqlStmt" option definition); it is
  // user-visible text, so it is left untouched in this documentation-only pass.
  sb.append(
      "-su <other database user> -sp <other database user password> -su <other database sql statement>\n");
  sb.append(
      "-t <OmniSci target table> -b <transfer buffer size> -f <table fragment size>\n");
  sb.append("[-tr] -i <init commands file>\n");
  sb.append("\nSQLImporter -h | --help\n\n");

  HelpFormatter formatter = new HelpFormatter();
  // Forces help to print out options in order they were added rather
  // than in alphabetical order
  formatter.setOptionComparator(null);
  int help_width = 100;
  formatter.printHelp(help_width, sb.toString(), "", options, "");
  }
98 
  // NOTE(review): the declaration line was lost in extraction here; since this
  // code populates `options`, it is most likely the SQLImporter_args
  // constructor body.
  options.addOption("r", true, "Row Load Limit");

  // OmniSci authentication options
  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
  options.addOption(
      Option.builder("u").hasArg().desc("OmniSci User").longOpt("user").build());
  options.addOption(Option.builder("p")
                            .hasArg()
                            .desc("OmniSci Password")
                            .longOpt("passwd")
                            .build());
  // OmniSci transport options: binary/http/https are mutually exclusive, so
  // they are registered as an OptionGroup.
  OptionGroup transport_grp = new OptionGroup();
  transport_grp.addOption(Option.builder()
                                  .desc("use binary transport to connect to OmniSci ")
                                  .longOpt("binary")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use http transport to connect to OmniSci ")
                                  .longOpt("http")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use https transport to connect to OmniSci ")
                                  .longOpt("https")
                                  .build());
  options.addOptionGroup(transport_grp);

  // OmniSci database server details
  options.addOption(Option.builder("s")
                            .hasArg()
                            .desc("OmniSci Server")
                            .longOpt("server")
                            .build());
  options.addOption(Option.builder("db")
                            .hasArg()
                            .desc("OmniSci Database")
                            .longOpt("database")
                            .build());
  options.addOption(
      Option.builder().hasArg().desc("OmniSci Port").longOpt("port").build());

  // OmniSci server authentication options
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store")
                            .longOpt("ca-trust-store")
                            .build());
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store password")
                            .longOpt("ca-trust-store-passwd")
                            .build());
  // NOTE(review): "Inseure" (and the doubled "server") are typos in this
  // user-visible description; left untouched in this documentation-only pass.
  options.addOption(
      Option.builder()
          .desc("Inseure TLS - do not validate server OmniSci server credentials")
          .longOpt("insecure")
          .build());

  // Other database connection details. Connection string, source user,
  // source password, select statement and target table are mandatory.
  options.addOption(Option.builder("d")
                            .hasArg()
                            .desc("JDBC driver class")
                            .longOpt("driver")
                            .build());
  options.addOption(Option.builder("c")
                            .hasArg()
                            .desc("JDBC Connection string")
                            .longOpt("jdbcConnect")
                            .required()
                            .build());
  options.addOption(Option.builder("su")
                            .hasArg()
                            .desc("Source User")
                            .longOpt("sourceUser")
                            .required()
                            .build());
  options.addOption(Option.builder("sp")
                            .hasArg()
                            .desc("Source Password")
                            .longOpt("sourcePasswd")
                            .required()
                            .build());
  options.addOption(Option.builder("ss")
                            .hasArg()
                            .desc("SQL Select statement")
                            .longOpt("sqlStmt")
                            .required()
                            .build());

  options.addOption(Option.builder("t")
                            .hasArg()
                            .desc("OmniSci Target Table")
                            .longOpt("targetTable")
                            .required()
                            .build());

  // Tuning knobs: rows per thrift batch and OmniSci fragment size.
  options.addOption(Option.builder("b")
                            .hasArg()
                            .desc("transfer buffer size")
                            .longOpt("bufferSize")
                            .build());
  options.addOption(Option.builder("f")
                            .hasArg()
                            .desc("table fragment size")
                            .longOpt("fragmentSize")
                            .build());

  options.addOption(Option.builder("tr")
                            .desc("Truncate table if it exists")
                            .longOpt("truncate")
                            .build());
  options.addOption(Option.builder("i")
                            .hasArg()
                            .desc("File containing init command for DB")
                            .longOpt("initializeFile")
                            .build());
  }
217 
  // Marks an already-registered option as required (used by parse() to
  // promote user/passwd to mandatory once either is seen) and returns it so
  // it can be re-added to the Options set in one expression.
  private Option setOptionRequired(Option option) {
    option.setRequired(true);
    return option;
  }
222 
  /**
   * Two-phase command-line parse: first scan only for -h/--help (so help can
   * be shown even when required options are missing), then run the full parse
   * and enforce cross-option rules (user/passwd pairing, insecure-implies-
   * https, and the disabled future PKI checks).
   *
   * @throws ParseException on missing or conflicting options
   */
  public CommandLine parse(String[] args) throws ParseException {
    CommandLineParser clp = new DefaultParser() {
      public CommandLine parse(Options options, String[] strings) throws ParseException {
        Options helpOptions = new Options();
        helpOptions.addOption(
            Option.builder("h").desc("help message").longOpt("help").build());
        try {
          // NOTE(review): `cmd` is declared inside this try yet used below;
          // the extraction dropped at least one original line (234) — in the
          // upstream source `cmd` is in scope for the whole method and the
          // help branch also prints the help message before exiting. Verify
          // against the repository source.
          CommandLine cmd = super.parse(helpOptions, strings);
        } catch (UnrecognizedOptionException uE) {
          // Deliberately ignored: only the help option is registered at this
          // stage, so every real option is "unrecognized" here.
        }
        if (cmd.hasOption("help")) {
          exit(0);
        }
        if (cmd.hasOption("version")) {
          printVersion();
          exit(0);
        }
        cmd = super.parse(options, strings);
        // Either a db user or (future PKI) a client certificate must be given.
        if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
          throw new MissingArgumentException(
              "Must supply either an OmniSci db user or a user certificate");
        }
        // if user supplied must have password and visa versa
        if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
          options.addOption(setOptionRequired(options.getOption("user")));
          options.addOption(setOptionRequired(options.getOption("passwd")));
          super.parse(options, strings);
        }

        // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
        // and
        // visa versa
        if (false) { // deliberately disabled until PKI sign-on lands
          if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
            options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
            options.addOption(
                setOptionRequired(options.getOption("ca-trust-store-password")));
            super.parse(options, strings);
          }
          if (options.getOption("user").isRequired()
                  && options.getOption("client-key").isRequired()) {
            // NOTE(review): the declaration of `meo` (original line 265) was
            // lost in extraction; upstream assigns the created exception here
            // before the throw below.
            MutuallyExlusiveOptionsException.create(
                "user/password can not be use with client-cert/client-key",
                strings);
            throw meo;
          }

          if (cmd.hasOption("http")
                  || cmd.hasOption("binary")
                          && (cmd.hasOption("client-cert")
                                  || cmd.hasOption("client-key"))) {
            // NOTE(review): created but never thrown — dead code inside this
            // disabled if(false) block.
            MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                "http|binary can not be use with ca-cert|client-cert|client-key",
                strings);
          }
        }

        // --insecure only makes sense when TLS is in use.
        if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
          MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
              "insecure can only be use with https", strings);
          throw meo;
        }

        return cmd;
      }

      // Required by the CommandLineParser interface; unused by this tool.
      public CommandLine parse(Options options, String[] strings, boolean b)
          throws ParseException {
        return null;
      }
    };
    return clp.parse(options, args);
  }
}
299 
/**
 * Imports the result of a SQL query run against another JDBC database into
 * an OmniSci table: creates/verifies the target table, then streams rows in
 * columnar batches over thrift (load_table_binary_columnar).
 */
public class SQLImporter {
  // OmniSci session handle, set by createMapDConnection().
  protected String session = null;
  // Thrift client for the OmniSci server, set by createMapDConnection().
  protected OmniSci.Client client = null;
  // Parsed command line, set in doWork().
  private CommandLine cmd = null;
  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
  // Millisecond-to-epoch-second conversion used for TIME/TIMESTAMP/DATE.
  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
    return milliseconds / 1000;
  };

  // NOTE(review): one original line (309) was dropped by extraction here; the
  // unqualified `vendor_types` used in executeQuery()/createMapDTable()
  // suggests it declared a Db_vendor_types field — verify against upstream.
311  public static void main(String[] args) {
312  SQLImporter sq = new SQLImporter();
313  sq.doWork(args);
314  }
315 
316  void doWork(String[] args) {
317  // create Options object
318 
319  SQLImporter_args s_args = new SQLImporter_args();
320 
321  try {
322  cmd = s_args.parse(args);
323  } catch (ParseException ex) {
324  LOGGER.error(ex.getLocalizedMessage());
325  s_args.printHelpMessage();
326  exit(0);
327  }
328  executeQuery();
329  }
330 
331  void executeQuery() {
332  Connection conn = null;
333  Statement stmt = null;
334 
335  long totalTime = 0;
336 
337  try {
338  // Open a connection
339  LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
340  conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
341  cmd.getOptionValue("sourceUser"),
342  cmd.getOptionValue("sourcePasswd"));
343  vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
344  long startTime = System.currentTimeMillis();
345 
346  // run init file script on targe DB if present
347  if (cmd.hasOption("initializeFile")) {
348  run_init(conn);
349  }
350 
351  // set autocommit off to allow postgress to not load all results
352  try {
353  conn.setAutoCommit(false);
354  } catch (SQLException se) {
355  LOGGER.warn(
356  "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
357  + se.toString());
358  }
359 
360  // Execute a query
361  stmt = conn.createStatement();
362 
363  int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
364  // set the jdbc fetch buffer size to reduce the amount of records being moved to
365  // java from postgress
366  stmt.setFetchSize(bufferSize);
367  long timer;
368 
369  ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));
370 
371  // check if table already exists and is compatible in OmniSci with the query
372  // metadata
373  ResultSetMetaData md = rs.getMetaData();
374  checkMapDTable(conn, md);
375 
376  timer = System.currentTimeMillis();
377 
378  long resultCount = 0;
379  int bufferCount = 0;
380  long total = 0;
381 
382  List<TColumn> cols = new ArrayList(md.getColumnCount());
383  for (int i = 1; i <= md.getColumnCount(); i++) {
384  TColumn col = setupBinaryColumn(i, md, bufferSize);
385  cols.add(col);
386  }
387 
388  // read data from old DB
389  while (rs.next()) {
390  for (int i = 1; i <= md.getColumnCount(); i++) {
391  setColValue(rs,
392  cols.get(i - 1),
393  md.getColumnType(i),
394  i,
395  md.getScale(i),
396  md.getColumnTypeName(i));
397  }
398  resultCount++;
399  bufferCount++;
400  if (bufferCount == bufferSize) {
401  bufferCount = 0;
402  // send the buffer to mapD
403  client.load_table_binary_columnar(
404  session, cmd.getOptionValue("targetTable"), cols); // old
405  // recreate columnar store for use
406  for (int i = 1; i <= md.getColumnCount(); i++) {
407  resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
408  }
409 
410  if (resultCount % 100000 == 0) {
411  LOGGER.info("Imported " + resultCount + " records");
412  }
413  }
414  }
415  if (bufferCount > 0) {
416  // send the LAST buffer to mapD
417  client.load_table_binary_columnar(
418  session, cmd.getOptionValue("targetTable"), cols);
419  bufferCount = 0;
420  }
421  LOGGER.info("result set count is " + resultCount + " read time is "
422  + (System.currentTimeMillis() - timer) + "ms");
423 
424  // Clean-up environment
425  rs.close();
426  stmt.close();
427 
428  totalTime = System.currentTimeMillis() - startTime;
429  conn.close();
430  } catch (SQLException se) {
431  LOGGER.error("SQLException - " + se.toString());
432  se.printStackTrace();
433  } catch (TOmniSciException ex) {
434  LOGGER.error("TOmniSciException - " + ex.toString());
435  ex.printStackTrace();
436  } catch (TException ex) {
437  LOGGER.error("TException failed - " + ex.toString());
438  ex.printStackTrace();
439  } finally {
440  // finally block used to close resources
441  try {
442  if (stmt != null) {
443  stmt.close();
444  }
445  } catch (SQLException se2) {
446  } // nothing we can do
447  try {
448  if (conn != null) {
449  conn.close();
450  }
451  } catch (SQLException se) {
452  LOGGER.error("SQlException in close - " + se.toString());
453  se.printStackTrace();
454  } // end finally try
455  } // end try
456  }
457 
458  private void run_init(Connection conn) {
459  // attempt to open file
460  String line = "";
461  try {
462  BufferedReader reader =
463  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
464  Statement stmt = conn.createStatement();
465  while ((line = reader.readLine()) != null) {
466  if (line.isEmpty()) {
467  continue;
468  }
469  LOGGER.info("Running : " + line);
470  stmt.execute(line);
471  }
472  stmt.close();
473  reader.close();
474  } catch (IOException e) {
475  LOGGER.error("Exception occurred trying to read initialize file: "
476  + cmd.getOptionValue("initFile"));
477  exit(1);
478  } catch (SQLException e) {
479  LOGGER.error(
480  "Exception occurred trying to execute initialize file entry : " + line);
481  exit(1);
482  }
483  }
484 
485  private void help(Options options) {
486  // automatically generate the help statement
487  HelpFormatter formatter = new HelpFormatter();
488  formatter.setOptionComparator(null); // get options in the order they are created
489  formatter.printHelp("SQLImporter", options);
490  }
491 
  /**
   * Ensures the OmniSci target table exists and is compatible with the source
   * query's metadata: with --truncate the table is dropped and recreated,
   * otherwise its column signatures are verified; a missing table is created.
   *
   * @throws SQLException propagated from reading the source metadata
   */
  private void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
      throws SQLException {
    // NOTE(review): one original line (494) was dropped by extraction here;
    // upstream it most likely establishes the OmniSci session
    // (createMapDConnection()), which the client/session-based calls below
    // depend on — verify against the repository source.
    String tName = cmd.getOptionValue("targetTable");

    if (tableExists(tName)) {
      // check if we want to truncate — note this actually drops and
      // recreates the table rather than issuing TRUNCATE
      if (cmd.hasOption("truncate")) {
        executeMapDCommand("Drop table " + tName);
        createMapDTable(otherdb_conn, md);
      } else {
        List<TColumnType> columnInfo = getColumnInfo(tName);
        verifyColumnSignaturesMatch(otherdb_conn, columnInfo, md);
      }
    } else {
      createMapDTable(otherdb_conn, md);
    }
  }
510 
  /**
   * Verifies, column by column, that the existing OmniSci table can accept
   * the source query's result: same column count, matching names (case-
   * insensitive, same order) and compatible types. Any mismatch is fatal
   * (exit 1).
   *
   * The integer and float cases deliberately fall through: a smaller source
   * type may load into any wider destination type, so the case ordering is
   * behaviour-critical — do not reorder.
   *
   * @param otherdb_conn source connection, used only for GIS type lookups
   * @param dstColumns   OmniSci table column descriptors
   * @param srcColumns   source query metadata
   * @throws SQLException propagated from reading the source metadata
   */
  private void verifyColumnSignaturesMatch(Connection otherdb_conn,
      List<TColumnType> dstColumns,
      ResultSetMetaData srcColumns) throws SQLException {
    if (srcColumns.getColumnCount() != dstColumns.size()) {
      LOGGER.error("Table sizes do not match: Destination " + dstColumns.size()
          + " versus Source " + srcColumns.getColumnCount());
      exit(1);
    }
    for (int i = 1; i <= dstColumns.size(); ++i) {
      // Names must match in the same ordinal position (case-insensitive).
      if (!dstColumns.get(i - 1).getCol_name().equalsIgnoreCase(
              srcColumns.getColumnName(i))) {
        LOGGER.error(
            "Destination table does not have matching column in same order for column number"
            + i + " destination column name is " + dstColumns.get(i - 1).col_name
            + " versus Select " + srcColumns.getColumnName(i));
        exit(1);
      }
      TDatumType dstType = dstColumns.get(i - 1).getCol_type().getType();
      int dstPrecision = dstColumns.get(i - 1).getCol_type().getPrecision();
      int dstScale = dstColumns.get(i - 1).getCol_type().getScale();
      int srcType = srcColumns.getColumnType(i);
      int srcPrecision = srcColumns.getPrecision(i);
      int srcScale = srcColumns.getScale(i);

      boolean match = false;
      switch (srcType) {
        case java.sql.Types.TINYINT:
          match |= dstType == TDatumType.TINYINT;
          // NOTE: it's okay to import smaller type to a bigger one,
          // so we just fall through and try to match the next type.
          // But the order of case statements is important here!
        case java.sql.Types.SMALLINT:
          match |= dstType == TDatumType.SMALLINT;
        case java.sql.Types.INTEGER:
          match |= dstType == TDatumType.INT;
        case java.sql.Types.BIGINT:
          match |= dstType == TDatumType.BIGINT;
          break;
        case java.sql.Types.DECIMAL:
        case java.sql.Types.NUMERIC:
          // Exact numerics must match precision and scale exactly.
          match = dstType == TDatumType.DECIMAL && dstPrecision == srcPrecision
              && dstScale == srcScale;
          break;
        case java.sql.Types.FLOAT:
        case java.sql.Types.REAL:
          match |= dstType == TDatumType.FLOAT;
          // Fall through and try double
        case java.sql.Types.DOUBLE:
          match |= dstType == TDatumType.DOUBLE;
          break;
        case java.sql.Types.TIME:
          match = dstType == TDatumType.TIME;
          break;
        case java.sql.Types.TIMESTAMP:
          match = dstType == TDatumType.TIMESTAMP;
          break;
        case java.sql.Types.DATE:
          match = dstType == TDatumType.DATE;
          break;
        case java.sql.Types.BOOLEAN:
        case java.sql.Types
                .BIT: // deal with postgres treating boolean as bit... this will bite me
          match = dstType == TDatumType.BOOL;
          break;
        case java.sql.Types.NVARCHAR:
        case java.sql.Types.VARCHAR:
        case java.sql.Types.NCHAR:
        case java.sql.Types.CHAR:
        case java.sql.Types.LONGVARCHAR:
        case java.sql.Types.LONGNVARCHAR:
          match = dstType == TDatumType.STR;
          break;
        case java.sql.Types.OTHER:
          // NOTE: I ignore subtypes (geography vs geopetry vs none) here just because it
          // makes no difference for OmniSciDB at the moment
          Db_vendor_types.GisType gisType =
              vendor_types.find_gis_type(otherdb_conn, srcColumns, i);
          // SRID is carried in the destination scale field; it must agree.
          if (gisType.srid != dstScale) {
            match = false;
            break;
          }
          switch (dstType) {
            case POINT:
              match = gisType.type.equalsIgnoreCase("POINT");
              break;
            case LINESTRING:
              match = gisType.type.equalsIgnoreCase("LINESTRING");
              break;
            case POLYGON:
              match = gisType.type.equalsIgnoreCase("POLYGON");
              break;
            case MULTIPOLYGON:
              match = gisType.type.equalsIgnoreCase("MULTIPOLYGON");
              break;
            default:
              LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                  + " not Supported");
              exit(1);
          }
          break;
        default:
          LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
              + " not Supported");
          exit(1);
      }
      if (!match) {
        LOGGER.error("Source and destination types for column "
            + srcColumns.getColumnName(i)
            + " do not match. Please make sure that type, precision and scale are exactly the same");
        exit(1);
      }
    }
  }
624 
625  private void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData) {
626  StringBuilder sb = new StringBuilder();
627  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
628 
629  // Now iterate the metadata
630  try {
631  for (int i = 1; i <= metaData.getColumnCount(); i++) {
632  if (i > 1) {
633  sb.append(",");
634  }
635  LOGGER.debug("Column name is " + metaData.getColumnName(i));
636  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
637  LOGGER.debug("Column type is " + metaData.getColumnType(i));
638 
639  sb.append(metaData.getColumnName(i)).append(" ");
640  int col_type = metaData.getColumnType(i);
641  if (col_type == java.sql.Types.OTHER) {
642  Db_vendor_types.GisType type =
643  vendor_types.find_gis_type(otherdb_conn, metaData, i);
644  sb.append(Db_vendor_types.gis_type_to_str(type));
645  } else {
646  sb.append(getColType(metaData.getColumnType(i),
647  metaData.getPrecision(i),
648  metaData.getScale(i)));
649  }
650  }
651  sb.append(")");
652 
653  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
654  sb.append(" with (fragment_size = ");
655  sb.append(cmd.getOptionValue("fragmentSize", "0"));
656  sb.append(")");
657  }
658 
659  } catch (SQLException ex) {
660  LOGGER.error("Error processing the metadata - " + ex.toString());
661  exit(1);
662  }
663 
664  executeMapDCommand(sb.toString());
665  }
666 
  /**
   * Opens the thrift connection to the OmniSci server, choosing the
   * transport/protocol from the --https/--http/--binary flags (binary is the
   * default), then logs in when a user was supplied. Any failure is fatal:
   * exit 1/2/3/4 for transport / OmniSci / generic thrift / other errors.
   */
  private void createMapDConnection() {
    TTransport transport = null;
    // NOTE(review): this initial protocol wraps a null transport and is
    // always reassigned below — effectively a dead initialization.
    TProtocol protocol = new TBinaryProtocol(transport);
    int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
    String server = cmd.getOptionValue("server", "localhost");
    try {
      // Uses default certificate stores.
      boolean load_trust_store = cmd.hasOption("https"); // unused local
      SockTransportProperties skT = null;
      if (cmd.hasOption("https")) {
        // TLS over HTTP(S); --insecure disables server credential validation.
        skT = SockTransportProperties.getEncryptedClientDefaultTrustStore(
            !cmd.hasOption("insecure"));
        transport = skT.openHttpsClientTransport(server, port);
        transport.open();
        protocol = new TJSONProtocol(transport);
      } else if (cmd.hasOption("http")) {
        // NOTE(review): unlike the other two branches, no transport.open()
        // here — presumably openHttpClientTransport opens it; confirm against
        // SockTransportProperties.
        skT = SockTransportProperties.getUnencryptedClient();
        transport = skT.openHttpClientTransport(server, port);
        protocol = new TJSONProtocol(transport);
      } else {
        // Default: plain binary thrift transport.
        skT = SockTransportProperties.getUnencryptedClient();
        transport = skT.openClientTransport(server, port);
        transport.open();
        protocol = new TBinaryProtocol(transport);
      }

      client = new OmniSci.Client(protocol);
      // This if will be useless until PKI signon
      if (cmd.hasOption("user")) {
        session = client.connect(cmd.getOptionValue("user", "admin"),
            cmd.getOptionValue("passwd", "HyperInteractive"),
            cmd.getOptionValue("database", "omnisci"));
      }
      LOGGER.debug("Connected session is " + session);

    } catch (TTransportException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(1);
    } catch (TOmniSciException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(2);
    } catch (TException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(3);
    } catch (Exception ex) {
      LOGGER.error("General exception - " + ex.toString());
      exit(4);
    }
  }
716 
717  private List<TColumnType> getColumnInfo(String tName) {
718  LOGGER.debug("Getting columns for " + tName);
719  List<TColumnType> row_descriptor = null;
720  try {
721  TTableDetails table_details = client.get_table_details(session, tName);
722  row_descriptor = table_details.row_desc;
723  } catch (TOmniSciException ex) {
724  LOGGER.error("column check failed - " + ex.toString());
725  exit(3);
726  } catch (TException ex) {
727  LOGGER.error("column check failed - " + ex.toString());
728  exit(3);
729  }
730  return row_descriptor;
731  }
732 
733  private boolean tableExists(String tName) {
734  LOGGER.debug("Check for table " + tName);
735  try {
736  List<String> recv_get_tables = client.get_tables(session);
737  for (String s : recv_get_tables) {
738  if (s.equals(tName)) {
739  return true;
740  }
741  }
742  } catch (TOmniSciException ex) {
743  LOGGER.error("Table check failed - " + ex.toString());
744  exit(3);
745  } catch (TException ex) {
746  LOGGER.error("Table check failed - " + ex.toString());
747  exit(3);
748  }
749  return false;
750  }
751 
752  private void executeMapDCommand(String sql) {
753  LOGGER.info(" run comamnd :" + sql);
754 
755  try {
756  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
757  } catch (TOmniSciException ex) {
758  LOGGER.error("SQL Execute failed - " + ex.toString());
759  exit(1);
760  } catch (TException ex) {
761  LOGGER.error("SQL Execute failed - " + ex.toString());
762  exit(1);
763  }
764  }
765 
766  private String getColType(int cType, int precision, int scale) {
767  // Note - if cType is OTHER a earlier call will have been made
768  // to try and work out the db vendors specific type.
769  if (precision > 19) {
770  precision = 19;
771  }
772  if (scale > 19) {
773  scale = 18;
774  }
775  switch (cType) {
776  case java.sql.Types.TINYINT:
777  return ("TINYINT");
778  case java.sql.Types.SMALLINT:
779  return ("SMALLINT");
780  case java.sql.Types.INTEGER:
781  return ("INTEGER");
782  case java.sql.Types.BIGINT:
783  return ("BIGINT");
784  case java.sql.Types.FLOAT:
785  return ("FLOAT");
786  case java.sql.Types.DECIMAL:
787  return ("DECIMAL(" + precision + "," + scale + ")");
788  case java.sql.Types.DOUBLE:
789  return ("DOUBLE");
790  case java.sql.Types.REAL:
791  return ("REAL");
792  case java.sql.Types.NUMERIC:
793  return ("NUMERIC(" + precision + "," + scale + ")");
794  case java.sql.Types.TIME:
795  return ("TIME");
796  case java.sql.Types.TIMESTAMP:
797  return ("TIMESTAMP");
798  case java.sql.Types.DATE:
799  return ("DATE");
800  case java.sql.Types.BOOLEAN:
801  case java.sql.Types
802  .BIT: // deal with postgress treating boolean as bit... this will bite me
803  return ("BOOLEAN");
804  case java.sql.Types.NVARCHAR:
805  case java.sql.Types.VARCHAR:
806  case java.sql.Types.NCHAR:
807  case java.sql.Types.CHAR:
808  case java.sql.Types.LONGVARCHAR:
809  case java.sql.Types.LONGNVARCHAR:
810  return ("TEXT ENCODING DICT");
811  default:
812  throw new AssertionError("Column type " + cType + " not Supported");
813  }
814  }
815 
816  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
817  throws SQLException {
818  TColumn col = new TColumn();
819 
820  col.nulls = new ArrayList<Boolean>(bufferSize);
821 
822  col.data = new TColumnData();
823 
824  switch (md.getColumnType(i)) {
825  case java.sql.Types.TINYINT:
826  case java.sql.Types.SMALLINT:
827  case java.sql.Types.INTEGER:
828  case java.sql.Types.BIGINT:
829  case java.sql.Types.TIME:
830  case java.sql.Types.TIMESTAMP:
831  case java.sql.Types
832  .BIT: // deal with postgress treating boolean as bit... this will bite me
833  case java.sql.Types.BOOLEAN:
834  case java.sql.Types.DATE:
835  case java.sql.Types.DECIMAL:
836  case java.sql.Types.NUMERIC:
837  col.data.int_col = new ArrayList<Long>(bufferSize);
838  break;
839 
840  case java.sql.Types.FLOAT:
841  case java.sql.Types.DOUBLE:
842  case java.sql.Types.REAL:
843  col.data.real_col = new ArrayList<Double>(bufferSize);
844  break;
845 
846  case java.sql.Types.NVARCHAR:
847  case java.sql.Types.VARCHAR:
848  case java.sql.Types.NCHAR:
849  case java.sql.Types.CHAR:
850  case java.sql.Types.LONGVARCHAR:
851  case java.sql.Types.LONGNVARCHAR:
852  case java.sql.Types.OTHER:
853  col.data.str_col = new ArrayList<String>(bufferSize);
854  break;
855 
856  default:
857  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
858  }
859  return col;
860  }
861 
  /**
   * Appends the value of source column {@code colNum} from the current
   * ResultSet row into the matching TColumn buffer. SQL NULLs append a
   * type-appropriate placeholder (0 / 0.0 / "") with the nulls flag set.
   * Decimals are shifted into scaled longs, temporal types are stored as
   * epoch seconds, and GIS (OTHER) columns are stored as WKT strings.
   *
   * @param rs          source result set, positioned on the current row
   * @param col         destination buffer created by setupBinaryColumn
   * @param columnType  java.sql.Types code of the source column
   * @param colNum      1-based source column index
   * @param scale       decimal scale, used to scale DECIMAL/NUMERIC values
   * @param colTypeName vendor type name, used for GIS WKT extraction
   * @throws SQLException propagated from the ResultSet getters
   */
  private void setColValue(ResultSet rs,
      TColumn col,
      int columnType,
      int colNum,
      int scale,
      String colTypeName) throws SQLException {
    switch (columnType) {
      case java.sql.Types
              .BIT: // deal with postgress treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
        Boolean b = rs.getBoolean(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(b ? 1L : 0L);
        }
        break;

      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        BigDecimal bd = rs.getBigDecimal(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          // Shift the decimal point right by `scale` digits to produce the
          // scaled integer representation OmniSci expects.
          col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
        }
        break;

      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
        Long l = rs.getLong(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(new Long(0));
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(l);
        }
        break;

      case java.sql.Types.TIME:
        Time t = rs.getTime(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          // Stored as epoch seconds via the dateTimeUtils conversion hook.
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
          col.nulls.add(Boolean.FALSE);
        }

        break;
      case java.sql.Types.TIMESTAMP:
        Timestamp ts = rs.getTimestamp(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
          col.nulls.add(Boolean.FALSE);
        }

        break;
      case java.sql.Types.DATE:
        Date d = rs.getDate(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
          col.nulls.add(Boolean.FALSE);
        }
        break;
      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        Double db = rs.getDouble(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.real_col.add(new Double(0));

        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.real_col.add(db);
        }
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        String strVal = rs.getString(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");

        } else {
          col.data.str_col.add(strVal);
          col.nulls.add(Boolean.FALSE);
        }
        break;
      case java.sql.Types.OTHER:
        // GIS columns: shipped as WKT text via the vendor-specific helper.
        // NOTE(review): rs.wasNull() is queried here before any getter has
        // touched this column in this row — JDBC defines wasNull() relative
        // to the last column read; verify against upstream.
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");
        } else {
          col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
          col.nulls.add(Boolean.FALSE);
        }
        break;
      default:
        throw new AssertionError("Column type " + columnType + " not Supported");
    }
  }
986 
987  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
988  throws SQLException {
989  col.nulls.clear();
990 
991  switch (md.getColumnType(i)) {
992  case java.sql.Types.TINYINT:
993  case java.sql.Types.SMALLINT:
994  case java.sql.Types.INTEGER:
995  case java.sql.Types.BIGINT:
996  case java.sql.Types.TIME:
997  case java.sql.Types.TIMESTAMP:
998  case java.sql.Types
999  .BIT: // deal with postgress treating boolean as bit... this will bite me
1000  case java.sql.Types.BOOLEAN:
1001  case java.sql.Types.DATE:
1002  case java.sql.Types.DECIMAL:
1003  case java.sql.Types.NUMERIC:
1004  col.data.int_col.clear();
1005  break;
1006 
1007  case java.sql.Types.FLOAT:
1008  case java.sql.Types.DOUBLE:
1009  case java.sql.Types.REAL:
1010  col.data.real_col.clear();
1011  break;
1012 
1013  case java.sql.Types.NVARCHAR:
1014  case java.sql.Types.VARCHAR:
1015  case java.sql.Types.NCHAR:
1016  case java.sql.Types.CHAR:
1017  case java.sql.Types.LONGVARCHAR:
1018  case java.sql.Types.LONGNVARCHAR:
1019  case java.sql.Types.OTHER:
1020  col.data.str_col.clear();
1021  break;
1022  default:
1023  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
1024  }
1025  }
1026 }
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
String getColType(int cType, int precision, int scale)
void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
void help(Options options)
static void main(String[] args)
void doWork(String[] args)
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
size_t append(FILE *f, const size_t size, int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:135
void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData)
void verifyColumnSignaturesMatch(Connection otherdb_conn, List< TColumnType > dstColumns, ResultSetMetaData srcColumns)
static final Logger LOGGER
boolean tableExists(String tName)
long getSecondsFromMilliseconds(long milliseconds)
void executeMapDCommand(String sql)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)