OmniSciDB  6686921089
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2017 MapD Technologies, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
23 import com.omnisci.thrift.server.*;
24 
25 import org.apache.commons.cli.*;
26 import org.apache.thrift.TException;
27 import org.apache.thrift.protocol.TBinaryProtocol;
28 import org.apache.thrift.protocol.TJSONProtocol;
29 import org.apache.thrift.protocol.TProtocol;
30 import org.apache.thrift.transport.TSocket;
31 import org.apache.thrift.transport.TTransport;
32 import org.apache.thrift.transport.TTransportException;
33 import org.slf4j.Logger;
34 import org.slf4j.LoggerFactory;
35 
36 import java.io.BufferedReader;
37 import java.io.FileReader;
38 import java.io.IOException;
39 import java.math.BigDecimal;
40 import java.security.KeyStore;
41 import java.sql.*;
42 import java.time.*;
43 import java.util.ArrayList;
44 import java.util.List;
45 
/**
 * Conversion strategy from epoch milliseconds to epoch seconds. Declared as a
 * single-method interface so implementations can be supplied as lambdas (the
 * SQLImporter.dateTimeUtils field does exactly that with {@code ms / 1000}).
 */
@FunctionalInterface
interface DateTimeUtils {
  /**
   * @param milliseconds an epoch offset or duration in milliseconds
   * @return the equivalent number of whole seconds
   */
  long getSecondsFromMilliseconds(long milliseconds);
}
49 
50 class MutuallyExlusiveOptionsException extends ParseException {
51  protected MutuallyExlusiveOptionsException(String message) {
52  super(message);
53  }
54 
55  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
56  StringBuffer sb = new StringBuffer(
57  "Mutually exclusive options used. " + errMsg + ". Options provided [");
58  for (String s : strings) {
59  sb.append(s);
60  sb.append(" ");
61  }
62  sb.setCharAt(sb.length() - 1, ']');
63  return new MutuallyExlusiveOptionsException(sb.toString());
64  }
65 }
 // NOTE(review): the enclosing class declaration (listing line 66,
 // presumably "class SQLImporter_args {") is missing from this extract;
 // the members below belong to the command-line helper class used by
 // SQLImporter.doWork().
 // Registry of every command-line option SQLImporter accepts.
 67  private Options options = new Options();
 68 
 // Prints the importer's release version string to stdout.
 69  void printVersion() {
 70  System.out.println("SQLImporter Version 4.6.0");
 71  }
 72 
 // NOTE(review): the signature of the usage-printing method (listing line
 // 73, presumably "void printHelpMessage() {") is missing from this
 // extract; the body below builds the usage banner shown for -h/--help.
 74  StringBuffer sb = new StringBuffer("\nSQLImporter ");
 75  // Ready for PKI auth
 76  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
 77  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
 78  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
 79  // sb.append("([--ca-trust-store <ca trust store file name>]
 80  // --ca-trust-store-password
 81  // <trust store password> | --insecure)\n");
 82  sb.append(
 83  "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
 84  sb.append(
 85  "-su <other database user> -sp <other database user password> -ss <other database sql statement>\n");
 86  sb.append(
 87  "-t <OmniSci target table> -b <transfer buffer size> -f <table fragment size>\n");
 88  sb.append("[-tr] [-adtf] [-nprg] -i <init commands file>\n");
 89  sb.append("\nSQLImporter -h | --help\n\n");
 90 
 91  HelpFormatter formatter = new HelpFormatter();
 92  // Forces help to print out options in order they were added rather
 93  // than in alphabetical order
 94  formatter.setOptionComparator(null);
 95  int help_width = 100;
 96  formatter.printHelp(help_width, sb.toString(), "", options, "");
 97  }
98 
 // NOTE(review): the constructor signature (listing line 99, presumably
 // "SQLImporter_args() {") is missing from this extract. The body below
 // registers every option the importer understands; options marked
 // .required() are enforced by DefaultParser, the user/passwd pair is
 // made required dynamically in parse().
 100  options.addOption("r", true, "Row Load Limit");
 101 
 102  // OmniSci authentication options
 103  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
 104  options.addOption(
 105  Option.builder("u").hasArg().desc("OmniSci User").longOpt("user").build());
 106  options.addOption(Option.builder("p")
 107  .hasArg()
 108  .desc("OmniSci Password")
 109  .longOpt("passwd")
 110  .build());
 111  // OmniSci transport options
 // The three transports are mutually exclusive, hence the OptionGroup.
 112  OptionGroup transport_grp = new OptionGroup();
 113  transport_grp.addOption(Option.builder()
 114  .desc("use binary transport to connect to OmniSci ")
 115  .longOpt("binary")
 116  .build());
 117  transport_grp.addOption(Option.builder()
 118  .desc("use http transport to connect to OmniSci ")
 119  .longOpt("http")
 120  .build());
 121  transport_grp.addOption(Option.builder()
 122  .desc("use https transport to connect to OmniSci ")
 123  .longOpt("https")
 124  .build());
 125  options.addOptionGroup(transport_grp);
 126 
 127  // OmniSci database server details
 128  options.addOption(Option.builder("s")
 129  .hasArg()
 130  .desc("OmniSci Server")
 131  .longOpt("server")
 132  .build());
 133  options.addOption(Option.builder("db")
 134  .hasArg()
 135  .desc("OmniSci Database")
 136  .longOpt("database")
 137  .build());
 138  options.addOption(
 139  Option.builder().hasArg().desc("OmniSci Port").longOpt("port").build());
 140 
 141  // OmniSci server authentication options
 142  options.addOption(Option.builder()
 143  .hasArg()
 144  .desc("CA certificate trust store")
 145  .longOpt("ca-trust-store")
 146  .build());
 147  options.addOption(Option.builder()
 148  .hasArg()
 149  .desc("CA certificate trust store password")
 150  .longOpt("ca-trust-store-passwd")
 151  .build());
 152  options.addOption(
 153  Option.builder()
 154  .desc("Insecure TLS - do not validate server OmniSci server credentials")
 155  .longOpt("insecure")
 156  .build());
 157 
 158  // Other database connection details
 // The source-database options below are hard requirements for any import.
 159  options.addOption(Option.builder("d")
 160  .hasArg()
 161  .desc("JDBC driver class")
 162  .longOpt("driver")
 163  .build());
 164  options.addOption(Option.builder("c")
 165  .hasArg()
 166  .desc("JDBC Connection string")
 167  .longOpt("jdbcConnect")
 168  .required()
 169  .build());
 170  options.addOption(Option.builder("su")
 171  .hasArg()
 172  .desc("Source User")
 173  .longOpt("sourceUser")
 174  .required()
 175  .build());
 176  options.addOption(Option.builder("sp")
 177  .hasArg()
 178  .desc("Source Password")
 179  .longOpt("sourcePasswd")
 180  .required()
 181  .build());
 182  options.addOption(Option.builder("ss")
 183  .hasArg()
 184  .desc("SQL Select statement")
 185  .longOpt("sqlStmt")
 186  .required()
 187  .build());
 188 
 189  options.addOption(Option.builder("t")
 190  .hasArg()
 191  .desc("OmniSci Target Table")
 192  .longOpt("targetTable")
 193  .required()
 194  .build());
 195 
 196  options.addOption(Option.builder("b")
 197  .hasArg()
 198  .desc("transfer buffer size")
 199  .longOpt("bufferSize")
 200  .build());
 201  options.addOption(Option.builder("f")
 202  .hasArg()
 203  .desc("table fragment size")
 204  .longOpt("fragmentSize")
 205  .build());
 206 
 // NOTE(review): despite the name, checkMapDTable() DROPS and recreates
 // the table when this flag is set; it does not issue TRUNCATE.
 207  options.addOption(Option.builder("tr")
 208  .desc("Truncate table if it exists")
 209  .longOpt("truncate")
 210  .build());
 211 
 212  options.addOption(Option.builder("i")
 213  .hasArg()
 214  .desc("File containing init command for DB")
 215  .longOpt("initializeFile")
 216  .build());
 217 
 218  options.addOption(
 219  Option.builder("adtf")
 220  .desc("Allow double to float conversion, note precision will be reduced")
 221  .longOpt("AllowDoubleToFloat")
 222  .build());
 223 
 224  options.addOption(
 225  Option.builder("nprg")
 226  .desc("Do not assign Render Groups to Polygons (faster import, but not renderable)")
 227  .longOpt("noPolyRenderGroups")
 228  .build());
 229  }
230 
 // Marks the supplied option as mandatory and returns it so the call can
 // be chained inside options.addOption(...).
 231  private Option setOptionRequired(Option option) {
 232  option.setRequired(true);
 233  return option;
 234  }
235 
 // Parses the command line with cross-option validation. A first pass uses
 // a help-only Options set (unknown options swallowed) so -h/--help works
 // even when required options are absent; the real parse and the
 // mutual-exclusion checks follow.
 236  public CommandLine parse(String[] args) throws ParseException {
 237  CommandLineParser clp = new DefaultParser() {
 238  public CommandLine parse(Options options, String[] strings) throws ParseException {
 239  Options helpOptions = new Options();
 240  helpOptions.addOption(
 241  Option.builder("h").desc("help message").longOpt("help").build());
 // NOTE(review): `cmd` is declared inside this try (line 243) yet used
 // after it (line 246) - the outer declaration of cmd appears to be
 // missing from this extract, as does listing line 247 (presumably the
 // printHelpMessage() call preceding exit(0)).
 242  try {
 243  CommandLine cmd = super.parse(helpOptions, strings);
 244  } catch (UnrecognizedOptionException uE) {
 245  }
 246  if (cmd.hasOption("help")) {
 248  exit(0);
 249  }
 250  if (cmd.hasOption("version")) {
 251  printVersion();
 252  exit(0);
 253  }
 // Full parse against the real option set; required options enforced here.
 254  cmd = super.parse(options, strings);
 255  if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
 256  throw new MissingArgumentException(
 257  "Must supply either an OmniSci db user or a user certificate");
 258  }
 259  // if user supplied must have password and visa versa
 260  if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
 261  options.addOption(setOptionRequired(options.getOption("user")));
 262  options.addOption(setOptionRequired(options.getOption("passwd")));
 263  super.parse(options, strings);
 264  }
 265 
 266  // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
 267  // and
 268  // visa versa
 // Dead code: the whole PKI block is disabled via if (false).
 269  if (false) {
 270  if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
 271  options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
 272  options.addOption(
 273  setOptionRequired(options.getOption("ca-trust-store-password")));
 274  super.parse(options, strings);
 275  }
 276  if (options.getOption("user").isRequired()
 277  && options.getOption("client-key").isRequired()) {
 // NOTE(review): listing line 278 is missing from this extract - the
 // create() result below is unassigned while `meo` is thrown at line
 // 282, so line 278 presumably read "MutuallyExlusiveOptionsException
 // meo =".
 279  MutuallyExlusiveOptionsException.create(
 280  "user/password can not be use with client-cert/client-key",
 281  strings);
 282  throw meo;
 283  }
 284 
 285  if (cmd.hasOption("http")
 286  || cmd.hasOption("binary")
 287  && (cmd.hasOption("client-cert")
 288  || cmd.hasOption("client-key"))) {
 // NOTE(review): this exception is constructed but never thrown; being
 // inside the if (false) block it is currently unreachable anyway.
 289  MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
 290  "http|binary can not be use with ca-cert|client-cert|client-key",
 291  strings);
 292  }
 293  }
 294 
 // --insecure only makes sense for TLS connections.
 295  if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
 296  MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
 297  "insecure can only be use with https", strings);
 298  throw meo;
 299  }
 300 
 301  return cmd;
 302  }
 303 
 // Unused three-argument overload required by the anonymous subclass; the
 // importer only ever calls the two-argument form above.
 304  public CommandLine parse(Options options, String[] strings, boolean b)
 305  throws ParseException {
 306  return null;
 307  }
 308  };
 309  return clp.parse(options, args);
 310  }
311 }
312 
 // Imports the result of a SQL query run on another JDBC database into an
 // OmniSci table, creating or validating the target table as needed.
 313 public class SQLImporter {
 // Active OmniSci session token; null until client.connect() succeeds.
 314  protected String session = null;
 315  protected OmniSci.Client client = null;
 // Parsed command line, populated by doWork().
 316  private CommandLine cmd = null;
 317  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
 // Millisecond-to-second conversion used for TIME/TIMESTAMP/DATE columns
 // (truncating integer division).
 318  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
 319  return milliseconds / 1000;
 320  };
 321 
 // NOTE(review): listing line 322 is missing from this extract; elsewhere
 // this class reads and writes a `vendor_types` field (see executeQuery /
 // createMapDTable), so that declaration presumably lived here.
 323 
324  public static void main(String[] args) {
325  SQLImporter sq = new SQLImporter();
326  sq.doWork(args);
327  }
328 
329  void doWork(String[] args) {
330  // create Options object
331 
332  SQLImporter_args s_args = new SQLImporter_args();
333 
334  try {
335  cmd = s_args.parse(args);
336  } catch (ParseException ex) {
337  LOGGER.error(ex.getLocalizedMessage());
338  s_args.printHelpMessage();
339  exit(0);
340  }
341  executeQuery();
342  }
343 
 // Core import loop: connect to the source database, run the user's SELECT,
 // ensure the OmniSci target table exists and matches, then stream rows in
 // bufferSize batches via the thrift columnar-load API. All resources are
 // released in the finally block; failures are logged, not rethrown.
 344  void executeQuery() {
 345  Connection conn = null;
 346  Statement stmt = null;
 347 
 348  long totalTime = 0;
 349 
 350  try {
 351  // Open a connection
 352  LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
 353  conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
 354  cmd.getOptionValue("sourceUser"),
 355  cmd.getOptionValue("sourcePasswd"));
 // Vendor-specific helper (GIS type discovery etc.) chosen from the URL.
 356  vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
 357  long startTime = System.currentTimeMillis();
 358 
 359  // run init file script on targe DB if present
 360  if (cmd.hasOption("initializeFile")) {
 361  run_init(conn);
 362  }
 363 
 // NOTE(review): listing line 365 is missing from this extract; it
 // presumably opened a conditional around setAutoCommit, which is why an
 // extra closing brace appears at line 367 before the catch at 368.
 364  try {
 366  conn.setAutoCommit(false);
 367  }
 368  } catch (SQLException se) {
 369  LOGGER.warn(
 370  "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
 371  + se.toString());
 372  }
 373 
 374  // Execute a query
 375  stmt = conn.createStatement();
 376 
 377  int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
 378  // set the jdbc fetch buffer size to reduce the amount of records being moved to
 379  // java from postgress
 380  stmt.setFetchSize(bufferSize);
 381  long timer;
 382 
 383  ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));
 384 
 385  // check if table already exists and is compatible in OmniSci with the query
 386  // metadata
 387  ResultSetMetaData md = rs.getMetaData();
 388  checkMapDTable(conn, md);
 389 
 390  timer = System.currentTimeMillis();
 391 
 392  long resultCount = 0;
 393  int bufferCount = 0;
 // NOTE(review): `total` is never used below.
 394  long total = 0;
 395 
 // One columnar buffer per source column. NOTE(review): raw ArrayList -
 // should be new ArrayList<TColumn>(...).
 396  List<TColumn> cols = new ArrayList(md.getColumnCount());
 397  for (int i = 1; i <= md.getColumnCount(); i++) {
 398  TColumn col = setupBinaryColumn(i, md, bufferSize);
 399  cols.add(col);
 400  }
 401 
 402  boolean assignRenderGroups = !cmd.hasOption("noPolyRenderGroups");
 403 
 404  // read data from old DB
 405  while (rs.next()) {
 406  for (int i = 1; i <= md.getColumnCount(); i++) {
 407  setColValue(rs,
 408  cols.get(i - 1),
 409  md.getColumnType(i),
 410  i,
 411  md.getScale(i),
 412  md.getColumnTypeName(i));
 413  }
 414  resultCount++;
 415  bufferCount++;
 // Flush a full batch to the server, then reset the columnar buffers.
 416  if (bufferCount == bufferSize) {
 417  bufferCount = 0;
 418  // send the buffer to mapD
 419  if (assignRenderGroups) {
 420  client.load_table_binary_columnar_polys(
 421  session, cmd.getOptionValue("targetTable"), cols, null, true);
 422  } else {
 423  client.load_table_binary_columnar(
 424  session, cmd.getOptionValue("targetTable"), cols, null);
 425  }
 426  // recreate columnar store for use
 427  for (int i = 1; i <= md.getColumnCount(); i++) {
 428  resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
 429  }
 430 
 // NOTE(review): since resultCount only advances by bufferSize between
 // these checks, this only logs when bufferSize divides 100000.
 431  if (resultCount % 100000 == 0) {
 432  LOGGER.info("Imported " + resultCount + " records");
 433  }
 434  }
 435  }
 436  if (bufferCount > 0) {
 437  // send the LAST buffer to mapD
 438  if (assignRenderGroups) {
 439  client.load_table_binary_columnar_polys(
 440  session, cmd.getOptionValue("targetTable"), cols, null, true);
 441  } else {
 442  client.load_table_binary_columnar(
 443  session, cmd.getOptionValue("targetTable"), cols, null);
 444  }
 445  bufferCount = 0;
 446  }
 447 
 448  // dump render group assignment data immediately
 449  if (assignRenderGroups) {
 450  client.load_table_binary_columnar_polys(
 451  session, cmd.getOptionValue("targetTable"), null, null, false);
 452  }
 453 
 454  LOGGER.info("result set count is " + resultCount + " read time is "
 455  + (System.currentTimeMillis() - timer) + "ms");
 456 
 457  // Clean-up environment
 458  rs.close();
 459  stmt.close();
 460  conn.close();
 461 
 // NOTE(review): totalTime is computed but never reported.
 462  totalTime = System.currentTimeMillis() - startTime;
 463  } catch (SQLException se) {
 464  LOGGER.error("SQLException - " + se.toString());
 465  se.printStackTrace();
 466  } catch (TOmniSciException ex) {
 467  LOGGER.error("TOmniSciException - " + ex.getError_msg());
 468  ex.printStackTrace();
 469  } catch (TException ex) {
 470  LOGGER.error("TException failed - " + ex.toString());
 471  ex.printStackTrace();
 472  } finally {
 473  // finally block used to close resources
 474  try {
 475  if (stmt != null) {
 476  stmt.close();
 477  }
 478  } catch (SQLException se2) {
 479  } // nothing we can do
 480  try {
 481  if (conn != null) {
 482  conn.close();
 483  }
 484  } catch (SQLException se) {
 485  LOGGER.error("SQlException in close - " + se.toString());
 486  se.printStackTrace();
 487  }
 // Also tear down the OmniSci session if one was established.
 488  try {
 489  if (session != null) {
 490  client.disconnect(session);
 491  }
 492  } catch (TOmniSciException ex) {
 493  LOGGER.error("TOmniSciException - in finalization " + ex.getError_msg());
 494  ex.printStackTrace();
 495  } catch (TException ex) {
 496  LOGGER.error("TException - in finalization" + ex.toString());
 497  ex.printStackTrace();
 498  }
 499  }
 500  }
501 
502  private void run_init(Connection conn) {
503  // attempt to open file
504  String line = "";
505  try {
506  BufferedReader reader =
507  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
508  Statement stmt = conn.createStatement();
509  while ((line = reader.readLine()) != null) {
510  if (line.isEmpty()) {
511  continue;
512  }
513  LOGGER.info("Running : " + line);
514  stmt.execute(line);
515  }
516  stmt.close();
517  reader.close();
518  } catch (IOException e) {
519  LOGGER.error("Exception occurred trying to read initialize file: "
520  + cmd.getOptionValue("initFile"));
521  exit(1);
522  } catch (SQLException e) {
523  LOGGER.error(
524  "Exception occurred trying to execute initialize file entry : " + line);
525  exit(1);
526  }
527  }
528 
529  private void help(Options options) {
530  // automatically generate the help statement
531  HelpFormatter formatter = new HelpFormatter();
532  formatter.setOptionComparator(null); // get options in the order they are created
533  formatter.printHelp("SQLImporter", options);
534  }
535 
 // Ensures the OmniSci target table is ready for loading: creates it when
 // absent, drops and recreates it when --truncate was given, otherwise
 // verifies the existing columns match the source query's metadata.
 536  private void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
 537  throws SQLException {
 // NOTE(review): listing line 538 is missing from this extract. The
 // thrift `client`/`session` used by tableExists() below must already be
 // established by this point - confirm against the full source what that
 // line contained.
 539  String tName = cmd.getOptionValue("targetTable");
 540 
 541  if (tableExists(tName)) {
 542  // check if we want to truncate
 // NOTE(review): despite the option name, this drops and recreates the
 // table rather than truncating it.
 543  if (cmd.hasOption("truncate")) {
 544  executeMapDCommand("Drop table " + tName);
 545  createMapDTable(otherdb_conn, md);
 546  } else {
 547  List<TColumnType> columnInfo = getColumnInfo(tName);
 548  verifyColumnSignaturesMatch(otherdb_conn, columnInfo, md);
 549  }
 550  } else {
 551  createMapDTable(otherdb_conn, md);
 552  }
 553  }
554 
 // Validates that every destination (OmniSci) column lines up with the
 // corresponding source column: same name (case-insensitive), same order,
 // and a compatible type. Any mismatch is fatal (process exit). The
 // integer and float cases rely on DELIBERATE switch fall-through so a
 // smaller source type may land in a wider destination type - do not
 // "fix" the missing breaks.
 555  private void verifyColumnSignaturesMatch(Connection otherdb_conn,
 556  List<TColumnType> dstColumns,
 557  ResultSetMetaData srcColumns) throws SQLException {
 558  if (srcColumns.getColumnCount() != dstColumns.size()) {
 559  LOGGER.error("Table sizes do not match: Destination " + dstColumns.size()
 560  + " versus Source " + srcColumns.getColumnCount());
 561  exit(1);
 562  }
 563  for (int i = 1; i <= dstColumns.size(); ++i) {
 564  if (!dstColumns.get(i - 1).getCol_name().equalsIgnoreCase(
 565  srcColumns.getColumnName(i))) {
 566  LOGGER.error(
 567  "Destination table does not have matching column in same order for column number "
 568  + i + " destination column name is " + dstColumns.get(i - 1).col_name
 569  + " versus target column " + srcColumns.getColumnName(i));
 570  exit(1);
 571  }
 572  TDatumType dstType = dstColumns.get(i - 1).getCol_type().getType();
 573  int dstPrecision = dstColumns.get(i - 1).getCol_type().getPrecision();
 574  int dstScale = dstColumns.get(i - 1).getCol_type().getScale();
 575  int srcType = srcColumns.getColumnType(i);
 576  int srcPrecision = srcColumns.getPrecision(i);
 577  int srcScale = srcColumns.getScale(i);
 578 
 579  boolean match = false;
 580  switch (srcType) {
 581  case java.sql.Types.TINYINT:
 582  match |= dstType == TDatumType.TINYINT;
 583  // NOTE: it's okay to import smaller type to a bigger one,
 584  // so we just fall through and try to match the next type.
 585  // But the order of case statements is important here!
 586  case java.sql.Types.SMALLINT:
 587  match |= dstType == TDatumType.SMALLINT;
 588  case java.sql.Types.INTEGER:
 589  match |= dstType == TDatumType.INT;
 590  case java.sql.Types.BIGINT:
 591  match |= dstType == TDatumType.BIGINT;
 592  break;
 // Decimals must agree exactly on precision and scale.
 593  case java.sql.Types.DECIMAL:
 594  case java.sql.Types.NUMERIC:
 595  match = dstType == TDatumType.DECIMAL && dstPrecision == srcPrecision
 596  && dstScale == srcScale;
 597  break;
 598  case java.sql.Types.FLOAT:
 599  case java.sql.Types.REAL:
 600  match |= dstType == TDatumType.FLOAT;
 601  // Fall through and try double
 602  case java.sql.Types.DOUBLE:
 603  match |= dstType == TDatumType.DOUBLE;
 // --AllowDoubleToFloat permits a lossy double -> float destination.
 604  if (cmd.hasOption("AllowDoubleToFloat")) {
 605  match |= dstType == TDatumType.FLOAT;
 606  }
 607  break;
 608  case java.sql.Types.TIME:
 609  match = dstType == TDatumType.TIME;
 610  break;
 611  case java.sql.Types.TIMESTAMP:
 612  match = dstType == TDatumType.TIMESTAMP;
 613  break;
 614  case java.sql.Types.DATE:
 615  match = dstType == TDatumType.DATE;
 616  break;
 617  case java.sql.Types.BOOLEAN:
 618  case java.sql.Types
 619  .BIT: // deal with postgres treating boolean as bit... this will bite me
 620  match = dstType == TDatumType.BOOL;
 621  break;
 // Text columns may also feed geo destinations (WKT strings).
 622  case java.sql.Types.NVARCHAR:
 623  case java.sql.Types.VARCHAR:
 624  case java.sql.Types.NCHAR:
 625  case java.sql.Types.CHAR:
 626  case java.sql.Types.LONGVARCHAR:
 627  case java.sql.Types.LONGNVARCHAR:
 628  match = (dstType == TDatumType.STR || dstType == TDatumType.POINT
 629  || dstType == TDatumType.POLYGON || dstType == TDatumType.MULTIPOLYGON
 630  || dstType == TDatumType.LINESTRING);
 631  break;
 632  case java.sql.Types.OTHER:
 633  // NOTE: I ignore subtypes (geography vs geopetry vs none) here just because
 634  // it makes no difference for OmniSciDB at the moment
 635  Db_vendor_types.GisType gisType =
 636  vendor_types.find_gis_type(otherdb_conn, srcColumns, i);
 // For geo columns the destination "scale" field carries the SRID.
 637  if (gisType.srid != dstScale) {
 638  match = false;
 639  break;
 640  }
 641  switch (dstType) {
 642  case POINT:
 643  match = gisType.type.equalsIgnoreCase("POINT");
 644  break;
 645  case LINESTRING:
 646  match = gisType.type.equalsIgnoreCase("LINESTRING");
 647  break;
 648  case POLYGON:
 649  match = gisType.type.equalsIgnoreCase("POLYGON");
 650  break;
 651  case MULTIPOLYGON:
 652  match = gisType.type.equalsIgnoreCase("MULTIPOLYGON");
 653  break;
 654  default:
 655  LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
 656  + " not Supported");
 657  exit(1);
 658  }
 659  break;
 660  default:
 661  LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
 662  + " not Supported");
 663  exit(1);
 664  }
 665  if (!match) {
 666  LOGGER.error("Source and destination types for column "
 667  + srcColumns.getColumnName(i)
 668  + " do not match. Please make sure that type, precision and scale are exactly the same");
 669  exit(1);
 670  }
 671  }
 672  }
673 
674  private void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData) {
675  StringBuilder sb = new StringBuilder();
676  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
677 
678  // Now iterate the metadata
679  try {
680  for (int i = 1; i <= metaData.getColumnCount(); i++) {
681  if (i > 1) {
682  sb.append(",");
683  }
684  LOGGER.debug("Column name is " + metaData.getColumnName(i));
685  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
686  LOGGER.debug("Column type is " + metaData.getColumnType(i));
687 
688  sb.append(metaData.getColumnName(i)).append(" ");
689  int col_type = metaData.getColumnType(i);
690  if (col_type == java.sql.Types.OTHER) {
691  Db_vendor_types.GisType type =
692  vendor_types.find_gis_type(otherdb_conn, metaData, i);
693  sb.append(Db_vendor_types.gis_type_to_str(type));
694  } else {
695  sb.append(getColType(metaData.getColumnType(i),
696  metaData.getPrecision(i),
697  metaData.getScale(i)));
698  }
699  }
700  sb.append(")");
701 
702  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
703  sb.append(" with (fragment_size = ");
704  sb.append(cmd.getOptionValue("fragmentSize", "0"));
705  sb.append(")");
706  }
707 
708  } catch (SQLException ex) {
709  LOGGER.error("Error processing the metadata - " + ex.toString());
710  exit(1);
711  }
712 
713  executeMapDCommand(sb.toString());
714  }
715 
 // Opens a thrift connection to the OmniSci server using the transport the
 // user selected (--https / --http / default binary), then logs in and
 // stores the session token. Every failure path terminates the process
 // with a distinct exit code (1-4).
 716  private void createMapDConnection() {
 717  TTransport transport = null;
 // Placeholder protocol built over the null transport; every branch below
 // reassigns it before use.
 718  TProtocol protocol = new TBinaryProtocol(transport);
 719  int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
 720  String server = cmd.getOptionValue("server", "localhost");
 721  try {
 722  // Uses default certificate stores.
 // NOTE(review): load_trust_store is computed but never read.
 723  boolean load_trust_store = cmd.hasOption("https");
 724  SockTransportProperties skT = null;
 725  if (cmd.hasOption("https")) {
 // TLS transport; --insecure disables server certificate validation.
 726  skT = SockTransportProperties.getEncryptedClientDefaultTrustStore(
 727  !cmd.hasOption("insecure"));
 728  transport = skT.openHttpsClientTransport(server, port);
 729  transport.open();
 730  protocol = new TJSONProtocol(transport);
 731  } else if (cmd.hasOption("http")) {
 // NOTE(review): unlike the other branches there is no transport.open()
 // here - presumably the HTTP client transport opens lazily; confirm.
 732  skT = SockTransportProperties.getUnencryptedClient();
 733  transport = skT.openHttpClientTransport(server, port);
 734  protocol = new TJSONProtocol(transport);
 735  } else {
 736  skT = SockTransportProperties.getUnencryptedClient();
 737  transport = skT.openClientTransport(server, port);
 738  transport.open();
 739  protocol = new TBinaryProtocol(transport);
 740  }
 741 
 742  client = new OmniSci.Client(protocol);
 743  // This if will be useless until PKI signon
 744  if (cmd.hasOption("user")) {
 745  session = client.connect(cmd.getOptionValue("user", "admin"),
 746  cmd.getOptionValue("passwd", "HyperInteractive"),
 747  cmd.getOptionValue("database", "omnisci"));
 748  }
 749  LOGGER.debug("Connected session is " + session);
 750 
 751  } catch (TTransportException ex) {
 752  LOGGER.error("Connection failed - " + ex.toString());
 753  exit(1);
 754  } catch (TOmniSciException ex) {
 755  LOGGER.error("Connection failed - " + ex.getError_msg());
 756  exit(2);
 757  } catch (TException ex) {
 758  LOGGER.error("Connection failed - " + ex.toString());
 759  exit(3);
 760  } catch (Exception ex) {
 761  LOGGER.error("General exception - " + ex.toString());
 762  exit(4);
 763  }
 764  }
765 
766  private List<TColumnType> getColumnInfo(String tName) {
767  LOGGER.debug("Getting columns for " + tName);
768  List<TColumnType> row_descriptor = null;
769  try {
770  TTableDetails table_details = client.get_table_details(session, tName);
771  row_descriptor = table_details.row_desc;
772  } catch (TOmniSciException ex) {
773  LOGGER.error("column check failed - " + ex.getError_msg());
774  exit(3);
775  } catch (TException ex) {
776  LOGGER.error("column check failed - " + ex.toString());
777  exit(3);
778  }
779  return row_descriptor;
780  }
781 
782  private boolean tableExists(String tName) {
783  LOGGER.debug("Check for table " + tName);
784  try {
785  List<String> recv_get_tables = client.get_tables(session);
786  for (String s : recv_get_tables) {
787  if (s.equals(tName)) {
788  return true;
789  }
790  }
791  } catch (TOmniSciException ex) {
792  LOGGER.error("Table check failed - " + ex.getError_msg());
793  exit(3);
794  } catch (TException ex) {
795  LOGGER.error("Table check failed - " + ex.toString());
796  exit(3);
797  }
798  return false;
799  }
800 
801  private void executeMapDCommand(String sql) {
802  LOGGER.info("Run Command - " + sql);
803 
804  try {
805  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
806  } catch (TOmniSciException ex) {
807  LOGGER.error("SQL Execute failed - " + ex.getError_msg());
808  exit(1);
809  } catch (TException ex) {
810  LOGGER.error("SQL Execute failed - " + ex.toString());
811  exit(1);
812  }
813  }
814 
815  private String getColType(int cType, int precision, int scale) {
816  // Note - if cType is OTHER a earlier call will have been made
817  // to try and work out the db vendors specific type.
818  if (precision > 19) {
819  precision = 19;
820  }
821  if (scale > 19) {
822  scale = 18;
823  }
824  switch (cType) {
825  case java.sql.Types.TINYINT:
826  return ("TINYINT");
827  case java.sql.Types.SMALLINT:
828  return ("SMALLINT");
829  case java.sql.Types.INTEGER:
830  return ("INTEGER");
831  case java.sql.Types.BIGINT:
832  return ("BIGINT");
833  case java.sql.Types.FLOAT:
834  return ("FLOAT");
835  case java.sql.Types.DECIMAL:
836  return ("DECIMAL(" + precision + "," + scale + ")");
837  case java.sql.Types.DOUBLE:
838  return ("DOUBLE");
839  case java.sql.Types.REAL:
840  return ("REAL");
841  case java.sql.Types.NUMERIC:
842  return ("NUMERIC(" + precision + "," + scale + ")");
843  case java.sql.Types.TIME:
844  return ("TIME");
845  case java.sql.Types.TIMESTAMP:
846  return ("TIMESTAMP");
847  case java.sql.Types.DATE:
848  return ("DATE");
849  case java.sql.Types.BOOLEAN:
850  case java.sql.Types
851  .BIT: // deal with postgress treating boolean as bit... this will bite me
852  return ("BOOLEAN");
853  case java.sql.Types.NVARCHAR:
854  case java.sql.Types.VARCHAR:
855  case java.sql.Types.NCHAR:
856  case java.sql.Types.CHAR:
857  case java.sql.Types.LONGVARCHAR:
858  case java.sql.Types.LONGNVARCHAR:
859  return ("TEXT ENCODING DICT");
860  default:
861  throw new AssertionError("Column type " + cType + " not Supported");
862  }
863  }
864 
 // Allocates an empty columnar buffer whose backing list matches the JDBC
 // type of source column i: integral, temporal, boolean and decimal values
 // share int_col; floating point uses real_col; text and GIS (OTHER)
 // values use str_col. Must stay in sync with resetBinaryColumn() and
 // setColValue().
 865  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
 866  throws SQLException {
 867  TColumn col = new TColumn();
 868 
 869  col.nulls = new ArrayList<Boolean>(bufferSize);
 870 
 871  col.data = new TColumnData();
 872 
 873  switch (md.getColumnType(i)) {
 874  case java.sql.Types.TINYINT:
 875  case java.sql.Types.SMALLINT:
 876  case java.sql.Types.INTEGER:
 877  case java.sql.Types.BIGINT:
 878  case java.sql.Types.TIME:
 879  case java.sql.Types.TIMESTAMP:
 880  case java.sql.Types
 881  .BIT: // deal with postgress treating boolean as bit... this will bite me
 882  case java.sql.Types.BOOLEAN:
 883  case java.sql.Types.DATE:
 884  case java.sql.Types.DECIMAL:
 885  case java.sql.Types.NUMERIC:
 886  col.data.int_col = new ArrayList<Long>(bufferSize);
 887  break;
 888 
 889  case java.sql.Types.FLOAT:
 890  case java.sql.Types.DOUBLE:
 891  case java.sql.Types.REAL:
 892  col.data.real_col = new ArrayList<Double>(bufferSize);
 893  break;
 894 
 895  case java.sql.Types.NVARCHAR:
 896  case java.sql.Types.VARCHAR:
 897  case java.sql.Types.NCHAR:
 898  case java.sql.Types.CHAR:
 899  case java.sql.Types.LONGVARCHAR:
 900  case java.sql.Types.LONGNVARCHAR:
 901  case java.sql.Types.OTHER:
 902  col.data.str_col = new ArrayList<String>(bufferSize);
 903  break;
 904 
 905  default:
 906  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
 907  }
 908  return col;
 909  }
910 
911  private void setColValue(ResultSet rs,
912  TColumn col,
913  int columnType,
914  int colNum,
915  int scale,
916  String colTypeName) throws SQLException {
917  switch (columnType) {
918  case java.sql.Types
919  .BIT: // deal with postgress treating boolean as bit... this will bite me
920  case java.sql.Types.BOOLEAN:
921  Boolean b = rs.getBoolean(colNum);
922  if (rs.wasNull()) {
923  col.nulls.add(Boolean.TRUE);
924  col.data.int_col.add(0L);
925  } else {
926  col.nulls.add(Boolean.FALSE);
927  col.data.int_col.add(b ? 1L : 0L);
928  }
929  break;
930 
931  case java.sql.Types.DECIMAL:
932  case java.sql.Types.NUMERIC:
933  BigDecimal bd = rs.getBigDecimal(colNum);
934  if (rs.wasNull()) {
935  col.nulls.add(Boolean.TRUE);
936  col.data.int_col.add(0L);
937  } else {
938  col.nulls.add(Boolean.FALSE);
939  col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
940  }
941  break;
942 
943  case java.sql.Types.TINYINT:
944  case java.sql.Types.SMALLINT:
945  case java.sql.Types.INTEGER:
946  case java.sql.Types.BIGINT:
947  Long l = rs.getLong(colNum);
948  if (rs.wasNull()) {
949  col.nulls.add(Boolean.TRUE);
950  col.data.int_col.add(new Long(0));
951  } else {
952  col.nulls.add(Boolean.FALSE);
953  col.data.int_col.add(l);
954  }
955  break;
956 
957  case java.sql.Types.TIME:
958  Time t = rs.getTime(colNum);
959  if (rs.wasNull()) {
960  col.nulls.add(Boolean.TRUE);
961  col.data.int_col.add(0L);
962 
963  } else {
964  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
965  col.nulls.add(Boolean.FALSE);
966  }
967 
968  break;
969  case java.sql.Types.TIMESTAMP:
970  Timestamp ts = rs.getTimestamp(colNum);
971  if (rs.wasNull()) {
972  col.nulls.add(Boolean.TRUE);
973  col.data.int_col.add(0L);
974 
975  } else {
976  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
977  col.nulls.add(Boolean.FALSE);
978  }
979 
980  break;
981  case java.sql.Types.DATE:
982  Date d = rs.getDate(colNum);
983  if (rs.wasNull()) {
984  col.nulls.add(Boolean.TRUE);
985  col.data.int_col.add(0L);
986 
987  } else {
988  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
989  col.nulls.add(Boolean.FALSE);
990  }
991  break;
992  case java.sql.Types.FLOAT:
993  case java.sql.Types.DOUBLE:
994  case java.sql.Types.REAL:
995  Double db = rs.getDouble(colNum);
996  if (rs.wasNull()) {
997  col.nulls.add(Boolean.TRUE);
998  col.data.real_col.add(new Double(0));
999 
1000  } else {
1001  col.nulls.add(Boolean.FALSE);
1002  col.data.real_col.add(db);
1003  }
1004  break;
1005 
1006  case java.sql.Types.NVARCHAR:
1007  case java.sql.Types.VARCHAR:
1008  case java.sql.Types.NCHAR:
1009  case java.sql.Types.CHAR:
1010  case java.sql.Types.LONGVARCHAR:
1011  case java.sql.Types.LONGNVARCHAR:
1012  String strVal = rs.getString(colNum);
1013  if (rs.wasNull()) {
1014  col.nulls.add(Boolean.TRUE);
1015  col.data.str_col.add("");
1016 
1017  } else {
1018  col.data.str_col.add(strVal);
1019  col.nulls.add(Boolean.FALSE);
1020  }
1021  break;
1022  case java.sql.Types.OTHER:
1023  Object objVal = rs.getObject(colNum);
1024  if (rs.wasNull()) {
1025  col.nulls.add(Boolean.TRUE);
1026  col.data.str_col.add("");
1027  } else {
1028  col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
1029  col.nulls.add(Boolean.FALSE);
1030  }
1031  break;
1032  default:
1033  throw new AssertionError("Column type " + columnType + " not Supported");
1034  }
1035  }
1036 
 // Clears a columnar buffer after a batch has been shipped to the server,
 // choosing which backing list to clear from the column's JDBC type. The
 // type-to-list mapping must stay identical to setupBinaryColumn().
 // NOTE(review): bufferSize is accepted but unused here.
 1037  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
 1038  throws SQLException {
 1039  col.nulls.clear();
 1040 
 1041  switch (md.getColumnType(i)) {
 1042  case java.sql.Types.TINYINT:
 1043  case java.sql.Types.SMALLINT:
 1044  case java.sql.Types.INTEGER:
 1045  case java.sql.Types.BIGINT:
 1046  case java.sql.Types.TIME:
 1047  case java.sql.Types.TIMESTAMP:
 1048  case java.sql.Types
 1049  .BIT: // deal with postgress treating boolean as bit... this will bite me
 1050  case java.sql.Types.BOOLEAN:
 1051  case java.sql.Types.DATE:
 1052  case java.sql.Types.DECIMAL:
 1053  case java.sql.Types.NUMERIC:
 1054  col.data.int_col.clear();
 1055  break;
 1056 
 1057  case java.sql.Types.FLOAT:
 1058  case java.sql.Types.DOUBLE:
 1059  case java.sql.Types.REAL:
 1060  col.data.real_col.clear();
 1061  break;
 1062 
 1063  case java.sql.Types.NVARCHAR:
 1064  case java.sql.Types.VARCHAR:
 1065  case java.sql.Types.NCHAR:
 1066  case java.sql.Types.CHAR:
 1067  case java.sql.Types.LONGVARCHAR:
 1068  case java.sql.Types.LONGNVARCHAR:
 1069  case java.sql.Types.OTHER:
 1070  col.data.str_col.clear();
 1071  break;
 1072  default:
 1073  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
 1074  }
 1075  }
1076 }
#define LINESTRING
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
size_t append(FILE *f, const size_t size, const int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:159
String getColType(int cType, int precision, int scale)
void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
#define SMALLINT
#define DOUBLE
#define BIGINT
void help(Options options)
#define DATE
static void main(String[] args)
#define MULTIPOLYGON
#define POINT
void doWork(String[] args)
#define TIME
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
#define TINYINT
void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData)
void verifyColumnSignaturesMatch(Connection otherdb_conn, List< TColumnType > dstColumns, ResultSetMetaData srcColumns)
tuple line
Definition: parse_ast.py:10
static final Logger LOGGER
#define TIMESTAMP
boolean tableExists(String tName)
char * t
long getSecondsFromMilliseconds(long milliseconds)
void executeMapDCommand(String sql)
#define POLYGON
#define FLOAT
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)