OmniSciDB  16c4e035a1
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2017 MapD Technologies, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
23 import com.omnisci.thrift.server.*;
24 
25 import org.apache.commons.cli.*;
26 import org.apache.thrift.TException;
27 import org.apache.thrift.protocol.TBinaryProtocol;
28 import org.apache.thrift.protocol.TJSONProtocol;
29 import org.apache.thrift.protocol.TProtocol;
30 import org.apache.thrift.transport.TSocket;
31 import org.apache.thrift.transport.TTransport;
32 import org.apache.thrift.transport.TTransportException;
33 import org.slf4j.Logger;
34 import org.slf4j.LoggerFactory;
35 
36 import java.io.BufferedReader;
37 import java.io.FileReader;
38 import java.io.IOException;
39 import java.math.BigDecimal;
40 import java.security.KeyStore;
41 import java.sql.*;
42 import java.time.*;
43 import java.util.ArrayList;
44 import java.util.List;
45 
/**
 * Converts an epoch offset in milliseconds to whole seconds.
 *
 * <p>SQLImporter binds this with a lambda ({@code milliseconds / 1000}), so the
 * annotation below guards the single-abstract-method contract at compile time.
 */
@FunctionalInterface
interface DateTimeUtils {
  /**
   * @param milliseconds epoch offset in milliseconds
   * @return the same offset in seconds (truncated toward zero)
   */
  long getSecondsFromMilliseconds(long milliseconds);
}
49 
50 class MutuallyExlusiveOptionsException extends ParseException {
51  protected MutuallyExlusiveOptionsException(String message) {
52  super(message);
53  }
54 
55  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
56  StringBuffer sb = new StringBuffer(
57  "Mutually exclusive options used. " + errMsg + ". Options provided [");
58  for (String s : strings) {
59  sb.append(s);
60  sb.append(" ");
61  }
62  sb.setCharAt(sb.length() - 1, ']');
63  return new MutuallyExlusiveOptionsException(sb.toString());
64  }
65 }
67  private Options options = new Options();
68 
69  void printVersion() {
70  System.out.println("SQLImporter Version 4.6.0");
71  }
72 
74  StringBuffer sb = new StringBuffer("\nSQLImporter ");
75  // Ready for PKI auth
76  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
77  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
78  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
79  // sb.append("([--ca-trust-store <ca trust store file name>]
80  // --ca-trust-store-password
81  // <trust store password> | --insecure)\n");
82  sb.append(
83  "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
84  sb.append(
85  "-su <other database user> -sp <other database user password> -ss <other database sql statement>\n");
86  sb.append(
87  "-t <OmniSci target table> -b <transfer buffer size> -f <table fragment size>\n");
88  sb.append("[-tr] [-adtf] [-nprg] -i <init commands file>\n");
89  sb.append("\nSQLImporter -h | --help\n\n");
90 
91  HelpFormatter formatter = new HelpFormatter();
92  // Forces help to print out options in order they were added rather
93  // than in alphabetical order
94  formatter.setOptionComparator(null);
95  int help_width = 100;
96  formatter.printHelp(help_width, sb.toString(), "", options, "");
97  }
98 
100  options.addOption("r", true, "Row Load Limit");
101 
102  // OmniSci authentication options
103  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
104  options.addOption(
105  Option.builder("u").hasArg().desc("OmniSci User").longOpt("user").build());
106  options.addOption(Option.builder("p")
107  .hasArg()
108  .desc("OmniSci Password")
109  .longOpt("passwd")
110  .build());
111  // OmniSci transport options
112  OptionGroup transport_grp = new OptionGroup();
113  transport_grp.addOption(Option.builder()
114  .desc("use binary transport to connect to OmniSci ")
115  .longOpt("binary")
116  .build());
117  transport_grp.addOption(Option.builder()
118  .desc("use http transport to connect to OmniSci ")
119  .longOpt("http")
120  .build());
121  transport_grp.addOption(Option.builder()
122  .desc("use https transport to connect to OmniSci ")
123  .longOpt("https")
124  .build());
125  options.addOptionGroup(transport_grp);
126 
127  // OmniSci database server details
128  options.addOption(Option.builder("s")
129  .hasArg()
130  .desc("OmniSci Server")
131  .longOpt("server")
132  .build());
133  options.addOption(Option.builder("db")
134  .hasArg()
135  .desc("OmniSci Database")
136  .longOpt("database")
137  .build());
138  options.addOption(
139  Option.builder().hasArg().desc("OmniSci Port").longOpt("port").build());
140 
141  // OmniSci server authentication options
142  options.addOption(Option.builder()
143  .hasArg()
144  .desc("CA certificate trust store")
145  .longOpt("ca-trust-store")
146  .build());
147  options.addOption(Option.builder()
148  .hasArg()
149  .desc("CA certificate trust store password")
150  .longOpt("ca-trust-store-passwd")
151  .build());
152  options.addOption(
153  Option.builder()
154  .desc("Insecure TLS - do not validate server OmniSci server credentials")
155  .longOpt("insecure")
156  .build());
157 
158  // Other database connection details
159  options.addOption(Option.builder("d")
160  .hasArg()
161  .desc("JDBC driver class")
162  .longOpt("driver")
163  .build());
164  options.addOption(Option.builder("c")
165  .hasArg()
166  .desc("JDBC Connection string")
167  .longOpt("jdbcConnect")
168  .required()
169  .build());
170  options.addOption(Option.builder("su")
171  .hasArg()
172  .desc("Source User")
173  .longOpt("sourceUser")
174  .required()
175  .build());
176  options.addOption(Option.builder("sp")
177  .hasArg()
178  .desc("Source Password")
179  .longOpt("sourcePasswd")
180  .required()
181  .build());
182  options.addOption(Option.builder("ss")
183  .hasArg()
184  .desc("SQL Select statement")
185  .longOpt("sqlStmt")
186  .required()
187  .build());
188 
189  options.addOption(Option.builder("t")
190  .hasArg()
191  .desc("OmniSci Target Table")
192  .longOpt("targetTable")
193  .required()
194  .build());
195 
196  options.addOption(Option.builder("b")
197  .hasArg()
198  .desc("transfer buffer size")
199  .longOpt("bufferSize")
200  .build());
201  options.addOption(Option.builder("f")
202  .hasArg()
203  .desc("table fragment size")
204  .longOpt("fragmentSize")
205  .build());
206 
207  options.addOption(Option.builder("tr")
208  .desc("Truncate table if it exists")
209  .longOpt("truncate")
210  .build());
211 
212  options.addOption(Option.builder("i")
213  .hasArg()
214  .desc("File containing init command for DB")
215  .longOpt("initializeFile")
216  .build());
217 
218  options.addOption(
219  Option.builder("adtf")
220  .desc("Allow double to float conversion, note precision will be reduced")
221  .longOpt("AllowDoubleToFloat")
222  .build());
223 
224  options.addOption(
225  Option.builder("ain")
226  .desc("Allow conversion from bigger integer types to smaller. Overflow might occur, "
227  + "use it only when casting is impossible")
228  .longOpt("AllowIntegerNarrowing")
229  .build());
230 
231  options.addOption(
232  Option.builder("nprg")
233  .desc("Do not assign Render Groups to Polygons (faster import, but not renderable)")
234  .longOpt("noPolyRenderGroups")
235  .build());
236  }
237 
238  private Option setOptionRequired(Option option) {
239  option.setRequired(true);
240  return option;
241  }
242 
243  public CommandLine parse(String[] args) throws ParseException {
244  CommandLineParser clp = new DefaultParser() {
245  public CommandLine parse(Options options, String[] strings) throws ParseException {
246  Options helpOptions = new Options();
247  helpOptions.addOption(
248  Option.builder("h").desc("help message").longOpt("help").build());
249  try {
250  CommandLine cmd = super.parse(helpOptions, strings);
251  } catch (UnrecognizedOptionException uE) {
252  }
253  if (cmd.hasOption("help")) {
255  exit(0);
256  }
257  if (cmd.hasOption("version")) {
258  printVersion();
259  exit(0);
260  }
261  cmd = super.parse(options, strings);
262  if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
263  throw new MissingArgumentException(
264  "Must supply either an OmniSci db user or a user certificate");
265  }
266  // if user supplied must have password and visa versa
267  if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
268  options.addOption(setOptionRequired(options.getOption("user")));
269  options.addOption(setOptionRequired(options.getOption("passwd")));
270  super.parse(options, strings);
271  }
272 
273  // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
274  // and
275  // visa versa
276  if (false) {
277  if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
278  options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
279  options.addOption(
280  setOptionRequired(options.getOption("ca-trust-store-password")));
281  super.parse(options, strings);
282  }
283  if (options.getOption("user").isRequired()
284  && options.getOption("client-key").isRequired()) {
286  MutuallyExlusiveOptionsException.create(
287  "user/password can not be use with client-cert/client-key",
288  strings);
289  throw meo;
290  }
291 
292  if (cmd.hasOption("http")
293  || cmd.hasOption("binary")
294  && (cmd.hasOption("client-cert")
295  || cmd.hasOption("client-key"))) {
296  MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
297  "http|binary can not be use with ca-cert|client-cert|client-key",
298  strings);
299  }
300  }
301 
302  if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
303  MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
304  "insecure can only be use with https", strings);
305  throw meo;
306  }
307 
308  return cmd;
309  }
310 
311  public CommandLine parse(Options options, String[] strings, boolean b)
312  throws ParseException {
313  return null;
314  }
315  };
316  return clp.parse(options, args);
317  }
318 }
319 
320 public class SQLImporter {
321  protected String session = null;
322  protected OmniSci.Client client = null;
323  private CommandLine cmd = null;
324  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
325  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
326  return milliseconds / 1000;
327  };
328 
330 
331  public static void main(String[] args) {
332  SQLImporter sq = new SQLImporter();
333  sq.doWork(args);
334  }
335 
336  void doWork(String[] args) {
337  // create Options object
338 
339  SQLImporter_args s_args = new SQLImporter_args();
340 
341  try {
342  cmd = s_args.parse(args);
343  } catch (ParseException ex) {
344  LOGGER.error(ex.getLocalizedMessage());
345  s_args.printHelpMessage();
346  exit(0);
347  }
348  executeQuery();
349  }
350 
351  void executeQuery() {
352  Connection conn = null;
353  Statement stmt = null;
354 
355  long totalTime = 0;
356 
357  try {
358  // Open a connection
359  LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
360  conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
361  cmd.getOptionValue("sourceUser"),
362  cmd.getOptionValue("sourcePasswd"));
363  vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
364  long startTime = System.currentTimeMillis();
365 
366  // run init file script on targe DB if present
367  if (cmd.hasOption("initializeFile")) {
368  run_init(conn);
369  }
370 
371  try {
373  conn.setAutoCommit(false);
374  }
375  } catch (SQLException se) {
376  LOGGER.warn(
377  "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
378  + se.toString());
379  }
380 
381  // Execute a query
382  stmt = conn.createStatement();
383 
384  int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
385  // set the jdbc fetch buffer size to reduce the amount of records being moved to
386  // java from postgress
387  stmt.setFetchSize(bufferSize);
388  long timer;
389 
390  ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));
391 
392  // check if table already exists and is compatible in OmniSci with the query
393  // metadata
394  ResultSetMetaData md = rs.getMetaData();
395  checkMapDTable(conn, md);
396 
397  timer = System.currentTimeMillis();
398 
399  long resultCount = 0;
400  int bufferCount = 0;
401  long total = 0;
402 
403  List<TColumn> cols = new ArrayList(md.getColumnCount());
404  for (int i = 1; i <= md.getColumnCount(); i++) {
405  TColumn col = setupBinaryColumn(i, md, bufferSize);
406  cols.add(col);
407  }
408 
409  boolean assignRenderGroups = !cmd.hasOption("noPolyRenderGroups");
410 
411  // read data from old DB
412  while (rs.next()) {
413  for (int i = 1; i <= md.getColumnCount(); i++) {
414  setColValue(rs,
415  cols.get(i - 1),
416  md.getColumnType(i),
417  i,
418  md.getScale(i),
419  md.getColumnTypeName(i));
420  }
421  resultCount++;
422  bufferCount++;
423  if (bufferCount == bufferSize) {
424  bufferCount = 0;
425  // send the buffer to mapD
426  if (assignRenderGroups) {
427  client.load_table_binary_columnar_polys(
428  session, cmd.getOptionValue("targetTable"), cols, null, true);
429  } else {
430  client.load_table_binary_columnar(
431  session, cmd.getOptionValue("targetTable"), cols, null);
432  }
433  // recreate columnar store for use
434  for (int i = 1; i <= md.getColumnCount(); i++) {
435  resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
436  }
437 
438  if (resultCount % 100000 == 0) {
439  LOGGER.info("Imported " + resultCount + " records");
440  }
441  }
442  }
443  if (bufferCount > 0) {
444  // send the LAST buffer to mapD
445  if (assignRenderGroups) {
446  client.load_table_binary_columnar_polys(
447  session, cmd.getOptionValue("targetTable"), cols, null, true);
448  } else {
449  client.load_table_binary_columnar(
450  session, cmd.getOptionValue("targetTable"), cols, null);
451  }
452  bufferCount = 0;
453  }
454 
455  // dump render group assignment data immediately
456  if (assignRenderGroups) {
457  client.load_table_binary_columnar_polys(
458  session, cmd.getOptionValue("targetTable"), null, null, false);
459  }
460 
461  LOGGER.info("result set count is " + resultCount + " read time is "
462  + (System.currentTimeMillis() - timer) + "ms");
463 
464  // Clean-up environment
465  rs.close();
466  stmt.close();
467  conn.close();
468 
469  totalTime = System.currentTimeMillis() - startTime;
470  } catch (SQLException se) {
471  LOGGER.error("SQLException - " + se.toString());
472  se.printStackTrace();
473  } catch (TOmniSciException ex) {
474  LOGGER.error("TOmniSciException - " + ex.getError_msg());
475  ex.printStackTrace();
476  } catch (TException ex) {
477  LOGGER.error("TException failed - " + ex.toString());
478  ex.printStackTrace();
479  } finally {
480  // finally block used to close resources
481  try {
482  if (stmt != null) {
483  stmt.close();
484  }
485  } catch (SQLException se2) {
486  } // nothing we can do
487  try {
488  if (conn != null) {
489  conn.close();
490  }
491  } catch (SQLException se) {
492  LOGGER.error("SQlException in close - " + se.toString());
493  se.printStackTrace();
494  }
495  try {
496  if (session != null) {
497  client.disconnect(session);
498  }
499  } catch (TOmniSciException ex) {
500  LOGGER.error("TOmniSciException - in finalization " + ex.getError_msg());
501  ex.printStackTrace();
502  } catch (TException ex) {
503  LOGGER.error("TException - in finalization" + ex.toString());
504  ex.printStackTrace();
505  }
506  }
507  }
508 
509  private void run_init(Connection conn) {
510  // attempt to open file
511  String line = "";
512  try {
513  BufferedReader reader =
514  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
515  Statement stmt = conn.createStatement();
516  while ((line = reader.readLine()) != null) {
517  if (line.isEmpty()) {
518  continue;
519  }
520  LOGGER.info("Running : " + line);
521  stmt.execute(line);
522  }
523  stmt.close();
524  reader.close();
525  } catch (IOException e) {
526  LOGGER.error("Exception occurred trying to read initialize file: "
527  + cmd.getOptionValue("initFile"));
528  exit(1);
529  } catch (SQLException e) {
530  LOGGER.error(
531  "Exception occurred trying to execute initialize file entry : " + line);
532  exit(1);
533  }
534  }
535 
536  private void help(Options options) {
537  // automatically generate the help statement
538  HelpFormatter formatter = new HelpFormatter();
539  formatter.setOptionComparator(null); // get options in the order they are created
540  formatter.printHelp("SQLImporter", options);
541  }
542 
543  private void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
544  throws SQLException {
546  String tName = cmd.getOptionValue("targetTable");
547 
548  if (tableExists(tName)) {
549  // check if we want to truncate
550  if (cmd.hasOption("truncate")) {
551  executeMapDCommand("Drop table " + tName);
552  createMapDTable(otherdb_conn, md);
553  } else {
554  List<TColumnType> columnInfo = getColumnInfo(tName);
555  verifyColumnSignaturesMatch(otherdb_conn, columnInfo, md);
556  }
557  } else {
558  createMapDTable(otherdb_conn, md);
559  }
560  }
561 
// Verifies, column by column, that the source result-set metadata is
// type-compatible with the existing OmniSci destination table. On the first
// mismatch it logs an error and exits the process (it never throws for a
// mismatch; SQLException only surfaces from metadata reads).
//
// The integer and float cases below use deliberate switch fall-through:
// a smaller source type is allowed to land in any larger destination type.
// Do not reorder the case labels.
private void verifyColumnSignaturesMatch(Connection otherdb_conn,
    List<TColumnType> dstColumns,
    ResultSetMetaData srcColumns) throws SQLException {
  // Column counts must agree before any per-column comparison.
  if (srcColumns.getColumnCount() != dstColumns.size()) {
    LOGGER.error("Table sizes do not match: Destination " + dstColumns.size()
        + " versus Source " + srcColumns.getColumnCount());
    exit(1);
  }
  for (int i = 1; i <= dstColumns.size(); ++i) {
    // Names must match positionally (case-insensitive) - no reordering.
    if (!dstColumns.get(i - 1).getCol_name().equalsIgnoreCase(
            srcColumns.getColumnName(i))) {
      LOGGER.error(
          "Destination table does not have matching column in same order for column number "
          + i + " destination column name is " + dstColumns.get(i - 1).col_name
          + " versus target column " + srcColumns.getColumnName(i));
      exit(1);
    }
    TDatumType dstType = dstColumns.get(i - 1).getCol_type().getType();
    int dstPrecision = dstColumns.get(i - 1).getCol_type().getPrecision();
    int dstScale = dstColumns.get(i - 1).getCol_type().getScale();
    int srcType = srcColumns.getColumnType(i);
    int srcPrecision = srcColumns.getPrecision(i);
    int srcScale = srcColumns.getScale(i);

    boolean match = false;
    switch (srcType) {
      case java.sql.Types.TINYINT:
        match |= dstType == TDatumType.TINYINT;
        // NOTE: it's okay to import smaller type to a bigger one,
        // so we just fall through and try to match the next type.
        // But the order of case statements is important here!
      case java.sql.Types.SMALLINT:
        match |= dstType == TDatumType.SMALLINT;
      case java.sql.Types.INTEGER:
        match |= dstType == TDatumType.INT;
      case java.sql.Types.BIGINT:
        match |= dstType == TDatumType.BIGINT;
        // Opt-in narrowing: caller accepts possible overflow.
        if (cmd.hasOption("AllowIntegerNarrowing")) {
          match |= dstType == TDatumType.TINYINT || dstType == TDatumType.SMALLINT
              || dstType == TDatumType.INT;
        }
        break;
      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        // Fixed-point columns must agree exactly on precision and scale.
        match = dstType == TDatumType.DECIMAL && dstPrecision == srcPrecision
            && dstScale == srcScale;
        break;
      case java.sql.Types.FLOAT:
      case java.sql.Types.REAL:
        match |= dstType == TDatumType.FLOAT;
        // Fall through and try double
      case java.sql.Types.DOUBLE:
        match |= dstType == TDatumType.DOUBLE;
        // Opt-in precision loss: double source into float destination.
        if (cmd.hasOption("AllowDoubleToFloat")) {
          match |= dstType == TDatumType.FLOAT;
        }
        break;
      case java.sql.Types.TIME:
        match = dstType == TDatumType.TIME;
        break;
      case java.sql.Types.TIMESTAMP:
        match = dstType == TDatumType.TIMESTAMP;
        break;
      case java.sql.Types.DATE:
        match = dstType == TDatumType.DATE;
        break;
      case java.sql.Types.BOOLEAN:
      case java.sql.Types
          .BIT: // deal with postgres treating boolean as bit... this will bite me
        match = dstType == TDatumType.BOOL;
        break;
      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        // Text may target a string column or any geo column (WKT import).
        match = (dstType == TDatumType.STR || dstType == TDatumType.POINT
            || dstType == TDatumType.POLYGON || dstType == TDatumType.MULTIPOLYGON
            || dstType == TDatumType.LINESTRING);
        break;
      case java.sql.Types.OTHER:
        // NOTE: I ignore subtypes (geography vs geopetry vs none) here just because
        // it makes no difference for OmniSciDB at the moment
        Db_vendor_types.GisType gisType =
            vendor_types.find_gis_type(otherdb_conn, srcColumns, i);
        // SRID is carried in the destination column's scale field.
        if (gisType.srid != dstScale) {
          match = false;
          break;
        }
        switch (dstType) {
          case POINT:
            match = gisType.type.equalsIgnoreCase("POINT");
            break;
          case LINESTRING:
            match = gisType.type.equalsIgnoreCase("LINESTRING");
            break;
          case POLYGON:
            match = gisType.type.equalsIgnoreCase("POLYGON");
            break;
          case MULTIPOLYGON:
            match = gisType.type.equalsIgnoreCase("MULTIPOLYGON");
            break;
          default:
            LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
                + " not Supported");
            exit(1);
        }
        break;
      default:
        LOGGER.error("Column type " + JDBCType.valueOf(srcType).getName()
            + " not Supported");
        exit(1);
    }
    if (!match) {
      LOGGER.error("Source and destination types for column "
          + srcColumns.getColumnName(i)
          + " do not match. Please make sure that type, precision and scale are exactly the same");
      exit(1);
    }
  }
}
684 
685  private void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData) {
686  StringBuilder sb = new StringBuilder();
687  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
688 
689  // Now iterate the metadata
690  try {
691  for (int i = 1; i <= metaData.getColumnCount(); i++) {
692  if (i > 1) {
693  sb.append(",");
694  }
695  LOGGER.debug("Column name is " + metaData.getColumnName(i));
696  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
697  LOGGER.debug("Column type is " + metaData.getColumnType(i));
698 
699  sb.append(metaData.getColumnName(i)).append(" ");
700  int col_type = metaData.getColumnType(i);
701  if (col_type == java.sql.Types.OTHER) {
702  Db_vendor_types.GisType type =
703  vendor_types.find_gis_type(otherdb_conn, metaData, i);
704  sb.append(Db_vendor_types.gis_type_to_str(type));
705  } else {
706  sb.append(getColType(metaData.getColumnType(i),
707  metaData.getPrecision(i),
708  metaData.getScale(i)));
709  }
710  }
711  sb.append(")");
712 
713  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
714  sb.append(" with (fragment_size = ");
715  sb.append(cmd.getOptionValue("fragmentSize", "0"));
716  sb.append(")");
717  }
718 
719  } catch (SQLException ex) {
720  LOGGER.error("Error processing the metadata - " + ex.toString());
721  exit(1);
722  }
723 
724  executeMapDCommand(sb.toString());
725  }
726 
727  private void createMapDConnection() {
728  TTransport transport = null;
729  TProtocol protocol = new TBinaryProtocol(transport);
730  int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
731  String server = cmd.getOptionValue("server", "localhost");
732  try {
733  // Uses default certificate stores.
734  boolean load_trust_store = cmd.hasOption("https");
735  SockTransportProperties skT = null;
736  if (cmd.hasOption("https")) {
737  skT = SockTransportProperties.getEncryptedClientDefaultTrustStore(
738  !cmd.hasOption("insecure"));
739  transport = skT.openHttpsClientTransport(server, port);
740  transport.open();
741  protocol = new TJSONProtocol(transport);
742  } else if (cmd.hasOption("http")) {
743  skT = SockTransportProperties.getUnencryptedClient();
744  transport = skT.openHttpClientTransport(server, port);
745  protocol = new TJSONProtocol(transport);
746  } else {
747  skT = SockTransportProperties.getUnencryptedClient();
748  transport = skT.openClientTransport(server, port);
749  transport.open();
750  protocol = new TBinaryProtocol(transport);
751  }
752 
753  client = new OmniSci.Client(protocol);
754  // This if will be useless until PKI signon
755  if (cmd.hasOption("user")) {
756  session = client.connect(cmd.getOptionValue("user", "admin"),
757  cmd.getOptionValue("passwd", "HyperInteractive"),
758  cmd.getOptionValue("database", "omnisci"));
759  }
760  LOGGER.debug("Connected session is " + session);
761 
762  } catch (TTransportException ex) {
763  LOGGER.error("Connection failed - " + ex.toString());
764  exit(1);
765  } catch (TOmniSciException ex) {
766  LOGGER.error("Connection failed - " + ex.getError_msg());
767  exit(2);
768  } catch (TException ex) {
769  LOGGER.error("Connection failed - " + ex.toString());
770  exit(3);
771  } catch (Exception ex) {
772  LOGGER.error("General exception - " + ex.toString());
773  exit(4);
774  }
775  }
776 
777  private List<TColumnType> getColumnInfo(String tName) {
778  LOGGER.debug("Getting columns for " + tName);
779  List<TColumnType> row_descriptor = null;
780  try {
781  TTableDetails table_details = client.get_table_details(session, tName);
782  row_descriptor = table_details.row_desc;
783  } catch (TOmniSciException ex) {
784  LOGGER.error("column check failed - " + ex.getError_msg());
785  exit(3);
786  } catch (TException ex) {
787  LOGGER.error("column check failed - " + ex.toString());
788  exit(3);
789  }
790  return row_descriptor;
791  }
792 
793  private boolean tableExists(String tName) {
794  LOGGER.debug("Check for table " + tName);
795  try {
796  List<String> recv_get_tables = client.get_tables(session);
797  for (String s : recv_get_tables) {
798  if (s.equals(tName)) {
799  return true;
800  }
801  }
802  } catch (TOmniSciException ex) {
803  LOGGER.error("Table check failed - " + ex.getError_msg());
804  exit(3);
805  } catch (TException ex) {
806  LOGGER.error("Table check failed - " + ex.toString());
807  exit(3);
808  }
809  return false;
810  }
811 
812  private void executeMapDCommand(String sql) {
813  LOGGER.info("Run Command - " + sql);
814 
815  try {
816  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
817  } catch (TOmniSciException ex) {
818  LOGGER.error("SQL Execute failed - " + ex.getError_msg());
819  exit(1);
820  } catch (TException ex) {
821  LOGGER.error("SQL Execute failed - " + ex.toString());
822  exit(1);
823  }
824  }
825 
826  private String getColType(int cType, int precision, int scale) {
827  // Note - if cType is OTHER a earlier call will have been made
828  // to try and work out the db vendors specific type.
829  if (precision > 19) {
830  precision = 19;
831  }
832  if (scale > 19) {
833  scale = 18;
834  }
835  switch (cType) {
836  case java.sql.Types.TINYINT:
837  return ("TINYINT");
838  case java.sql.Types.SMALLINT:
839  return ("SMALLINT");
840  case java.sql.Types.INTEGER:
841  return ("INTEGER");
842  case java.sql.Types.BIGINT:
843  return ("BIGINT");
844  case java.sql.Types.FLOAT:
845  return ("FLOAT");
846  case java.sql.Types.DECIMAL:
847  return ("DECIMAL(" + precision + "," + scale + ")");
848  case java.sql.Types.DOUBLE:
849  return ("DOUBLE");
850  case java.sql.Types.REAL:
851  return ("REAL");
852  case java.sql.Types.NUMERIC:
853  return ("NUMERIC(" + precision + "," + scale + ")");
854  case java.sql.Types.TIME:
855  return ("TIME");
856  case java.sql.Types.TIMESTAMP:
857  return ("TIMESTAMP");
858  case java.sql.Types.DATE:
859  return ("DATE");
860  case java.sql.Types.BOOLEAN:
861  case java.sql.Types
862  .BIT: // deal with postgress treating boolean as bit... this will bite me
863  return ("BOOLEAN");
864  case java.sql.Types.NVARCHAR:
865  case java.sql.Types.VARCHAR:
866  case java.sql.Types.NCHAR:
867  case java.sql.Types.CHAR:
868  case java.sql.Types.LONGVARCHAR:
869  case java.sql.Types.LONGNVARCHAR:
870  return ("TEXT ENCODING DICT");
871  default:
872  throw new AssertionError("Column type " + cType + " not Supported");
873  }
874  }
875 
876  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
877  throws SQLException {
878  TColumn col = new TColumn();
879 
880  col.nulls = new ArrayList<Boolean>(bufferSize);
881 
882  col.data = new TColumnData();
883 
884  switch (md.getColumnType(i)) {
885  case java.sql.Types.TINYINT:
886  case java.sql.Types.SMALLINT:
887  case java.sql.Types.INTEGER:
888  case java.sql.Types.BIGINT:
889  case java.sql.Types.TIME:
890  case java.sql.Types.TIMESTAMP:
891  case java.sql.Types
892  .BIT: // deal with postgress treating boolean as bit... this will bite me
893  case java.sql.Types.BOOLEAN:
894  case java.sql.Types.DATE:
895  case java.sql.Types.DECIMAL:
896  case java.sql.Types.NUMERIC:
897  col.data.int_col = new ArrayList<Long>(bufferSize);
898  break;
899 
900  case java.sql.Types.FLOAT:
901  case java.sql.Types.DOUBLE:
902  case java.sql.Types.REAL:
903  col.data.real_col = new ArrayList<Double>(bufferSize);
904  break;
905 
906  case java.sql.Types.NVARCHAR:
907  case java.sql.Types.VARCHAR:
908  case java.sql.Types.NCHAR:
909  case java.sql.Types.CHAR:
910  case java.sql.Types.LONGVARCHAR:
911  case java.sql.Types.LONGNVARCHAR:
912  case java.sql.Types.OTHER:
913  col.data.str_col = new ArrayList<String>(bufferSize);
914  break;
915 
916  default:
917  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
918  }
919  return col;
920  }
921 
922  private void setColValue(ResultSet rs,
923  TColumn col,
924  int columnType,
925  int colNum,
926  int scale,
927  String colTypeName) throws SQLException {
928  switch (columnType) {
929  case java.sql.Types
930  .BIT: // deal with postgress treating boolean as bit... this will bite me
931  case java.sql.Types.BOOLEAN:
932  Boolean b = rs.getBoolean(colNum);
933  if (rs.wasNull()) {
934  col.nulls.add(Boolean.TRUE);
935  col.data.int_col.add(0L);
936  } else {
937  col.nulls.add(Boolean.FALSE);
938  col.data.int_col.add(b ? 1L : 0L);
939  }
940  break;
941 
942  case java.sql.Types.DECIMAL:
943  case java.sql.Types.NUMERIC:
944  BigDecimal bd = rs.getBigDecimal(colNum);
945  if (rs.wasNull()) {
946  col.nulls.add(Boolean.TRUE);
947  col.data.int_col.add(0L);
948  } else {
949  col.nulls.add(Boolean.FALSE);
950  col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
951  }
952  break;
953 
954  case java.sql.Types.TINYINT:
955  case java.sql.Types.SMALLINT:
956  case java.sql.Types.INTEGER:
957  case java.sql.Types.BIGINT:
958  Long l = rs.getLong(colNum);
959  if (rs.wasNull()) {
960  col.nulls.add(Boolean.TRUE);
961  col.data.int_col.add(new Long(0));
962  } else {
963  col.nulls.add(Boolean.FALSE);
964  col.data.int_col.add(l);
965  }
966  break;
967 
968  case java.sql.Types.TIME:
969  Time t = rs.getTime(colNum);
970  if (rs.wasNull()) {
971  col.nulls.add(Boolean.TRUE);
972  col.data.int_col.add(0L);
973 
974  } else {
975  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
976  col.nulls.add(Boolean.FALSE);
977  }
978 
979  break;
980  case java.sql.Types.TIMESTAMP:
981  Timestamp ts = rs.getTimestamp(colNum);
982  if (rs.wasNull()) {
983  col.nulls.add(Boolean.TRUE);
984  col.data.int_col.add(0L);
985 
986  } else {
987  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
988  col.nulls.add(Boolean.FALSE);
989  }
990 
991  break;
992  case java.sql.Types.DATE:
993  Date d = rs.getDate(colNum);
994  if (rs.wasNull()) {
995  col.nulls.add(Boolean.TRUE);
996  col.data.int_col.add(0L);
997 
998  } else {
999  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
1000  col.nulls.add(Boolean.FALSE);
1001  }
1002  break;
1003  case java.sql.Types.FLOAT:
1004  case java.sql.Types.DOUBLE:
1005  case java.sql.Types.REAL:
1006  Double db = rs.getDouble(colNum);
1007  if (rs.wasNull()) {
1008  col.nulls.add(Boolean.TRUE);
1009  col.data.real_col.add(new Double(0));
1010 
1011  } else {
1012  col.nulls.add(Boolean.FALSE);
1013  col.data.real_col.add(db);
1014  }
1015  break;
1016 
1017  case java.sql.Types.NVARCHAR:
1018  case java.sql.Types.VARCHAR:
1019  case java.sql.Types.NCHAR:
1020  case java.sql.Types.CHAR:
1021  case java.sql.Types.LONGVARCHAR:
1022  case java.sql.Types.LONGNVARCHAR:
1023  String strVal = rs.getString(colNum);
1024  if (rs.wasNull()) {
1025  col.nulls.add(Boolean.TRUE);
1026  col.data.str_col.add("");
1027 
1028  } else {
1029  col.data.str_col.add(strVal);
1030  col.nulls.add(Boolean.FALSE);
1031  }
1032  break;
1033  case java.sql.Types.OTHER:
1034  Object objVal = rs.getObject(colNum);
1035  if (rs.wasNull()) {
1036  col.nulls.add(Boolean.TRUE);
1037  col.data.str_col.add("");
1038  } else {
1039  col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
1040  col.nulls.add(Boolean.FALSE);
1041  }
1042  break;
1043  default:
1044  throw new AssertionError("Column type " + columnType + " not Supported");
1045  }
1046  }
1047 
1048  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
1049  throws SQLException {
1050  col.nulls.clear();
1051 
1052  switch (md.getColumnType(i)) {
1053  case java.sql.Types.TINYINT:
1054  case java.sql.Types.SMALLINT:
1055  case java.sql.Types.INTEGER:
1056  case java.sql.Types.BIGINT:
1057  case java.sql.Types.TIME:
1058  case java.sql.Types.TIMESTAMP:
1059  case java.sql.Types
1060  .BIT: // deal with postgress treating boolean as bit... this will bite me
1061  case java.sql.Types.BOOLEAN:
1062  case java.sql.Types.DATE:
1063  case java.sql.Types.DECIMAL:
1064  case java.sql.Types.NUMERIC:
1065  col.data.int_col.clear();
1066  break;
1067 
1068  case java.sql.Types.FLOAT:
1069  case java.sql.Types.DOUBLE:
1070  case java.sql.Types.REAL:
1071  col.data.real_col.clear();
1072  break;
1073 
1074  case java.sql.Types.NVARCHAR:
1075  case java.sql.Types.VARCHAR:
1076  case java.sql.Types.NCHAR:
1077  case java.sql.Types.CHAR:
1078  case java.sql.Types.LONGVARCHAR:
1079  case java.sql.Types.LONGNVARCHAR:
1080  case java.sql.Types.OTHER:
1081  col.data.str_col.clear();
1082  break;
1083  default:
1084  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
1085  }
1086  }
1087 }
#define LINESTRING
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
size_t append(FILE *f, const size_t size, const int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:161
String getColType(int cType, int precision, int scale)
void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
#define SMALLINT
#define DOUBLE
#define BIGINT
void help(Options options)
#define DATE
static void main(String[] args)
#define MULTIPOLYGON
#define POINT
void doWork(String[] args)
#define TIME
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
#define TINYINT
void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData)
void verifyColumnSignaturesMatch(Connection otherdb_conn, List< TColumnType > dstColumns, ResultSetMetaData srcColumns)
tuple line
Definition: parse_ast.py:10
static final Logger LOGGER
#define TIMESTAMP
boolean tableExists(String tName)
char * t
long getSecondsFromMilliseconds(long milliseconds)
void executeMapDCommand(String sql)
#define POLYGON
#define FLOAT
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)