OmniSciDB  29e35f4d58
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2017 MapD Technologies, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
22 import com.mapd.thrift.server.MapD;
23 import com.mapd.thrift.server.TColumn;
24 import com.mapd.thrift.server.TColumnData;
25 import com.mapd.thrift.server.TColumnType;
26 import com.mapd.thrift.server.TMapDException;
27 import com.mapd.thrift.server.TQueryResult;
28 import com.mapd.thrift.server.TTableDetails;
30 
31 import org.apache.commons.cli.*;
32 import org.apache.thrift.TException;
33 import org.apache.thrift.protocol.TBinaryProtocol;
34 import org.apache.thrift.protocol.TJSONProtocol;
35 import org.apache.thrift.protocol.TProtocol;
36 import org.apache.thrift.transport.TSocket;
37 import org.apache.thrift.transport.TTransport;
38 import org.apache.thrift.transport.TTransportException;
39 import org.slf4j.Logger;
40 import org.slf4j.LoggerFactory;
41 
42 import java.io.BufferedReader;
43 import java.io.FileReader;
44 import java.io.IOException;
45 import java.math.BigDecimal;
46 import java.security.KeyStore;
47 import java.sql.*;
48 import java.time.*;
49 import java.util.ArrayList;
50 import java.util.List;
51 
/**
 * Conversion hook from JDBC epoch-millisecond timestamps to the epoch-second
 * values loaded into OmniSci time columns (implemented in SQLImporter as a
 * divide-by-1000 lambda).
 */
interface DateTimeUtils {
  long getSecondsFromMilliseconds(long milliseconds);
}
55 
56 class MutuallyExlusiveOptionsException extends ParseException {
58  super(message);
59  }
60 
61  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
62  StringBuffer sb = new StringBuffer(
63  "Mutually exclusive options used. " + errMsg + ". Options provided [");
64  for (String s : strings) {
65  sb.append(s);
66  sb.append(" ");
67  }
68  sb.setCharAt(sb.length() - 1, ']');
69  return new MutuallyExlusiveOptionsException(sb.toString());
70  }
71 }
  // The full option set accepted by SQLImporter; populated in the constructor.
  private Options options = new Options();

  /** Prints the importer's release version to stdout. */
  void printVersion() {
    System.out.println("SQLImporter Version 4.6.0");
  }
78 
80  StringBuffer sb = new StringBuffer("\nSQLImporter ");
81  // Ready for PKI auth
82  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
83  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
84  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
85  // sb.append("([--ca-trust-store <ca trust store file name>]
86  // --ca-trust-store-password
87  // <trust store password> | --insecure)\n");
88  sb.append(
89  "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
90  sb.append(
91  "-su <other database user> -sp <other database user password> -su <other database sql statement>\n");
92  sb.append(
93  "-t <OmniSci target table> -b <transfer buffer size> -f <table fragment size>\n");
94  sb.append("[-tr] -i <init commands file>\n");
95  sb.append("\nSQLImporter -h | --help\n\n");
96 
97  HelpFormatter formatter = new HelpFormatter();
98  // Forces help to print out options in order they were added rather
99  // than in alphabetical order
100  formatter.setOptionComparator(null);
101  int help_width = 100;
102  formatter.printHelp(help_width, sb.toString(), "", options, "");
103  }
104 
106  options.addOption("r", true, "Row Load Limit");
107 
108  // OmniSci authentication options
109  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
110  options.addOption(
111  Option.builder("u").hasArg().desc("OmniSci User").longOpt("user").build());
112  options.addOption(Option.builder("p")
113  .hasArg()
114  .desc("OmniSci Password")
115  .longOpt("passwd")
116  .build());
117  // OmniSci transport options
118  OptionGroup transport_grp = new OptionGroup();
119  transport_grp.addOption(Option.builder()
120  .desc("use binary transport to connect to OmniSci ")
121  .longOpt("binary")
122  .build());
123  transport_grp.addOption(Option.builder()
124  .desc("use http transport to connect to OmniSci ")
125  .longOpt("http")
126  .build());
127  transport_grp.addOption(Option.builder()
128  .desc("use https transport to connect to OmniSci ")
129  .longOpt("https")
130  .build());
131  options.addOptionGroup(transport_grp);
132 
133  // OmniSci database server details
134  options.addOption(Option.builder("s")
135  .hasArg()
136  .desc("OmniSci Server")
137  .longOpt("server")
138  .build());
139  options.addOption(Option.builder("db")
140  .hasArg()
141  .desc("OmniSci Database")
142  .longOpt("database")
143  .build());
144  options.addOption(
145  Option.builder().hasArg().desc("OmniSci Port").longOpt("port").build());
146 
147  // OmniSci server authentication options
148  options.addOption(Option.builder()
149  .hasArg()
150  .desc("CA certificate trust store")
151  .longOpt("ca-trust-store")
152  .build());
153  options.addOption(Option.builder()
154  .hasArg()
155  .desc("CA certificate trust store password")
156  .longOpt("ca-trust-store-passwd")
157  .build());
158  options.addOption(
159  Option.builder()
160  .desc("Inseure TLS - do not validate server OmniSci server credentials")
161  .longOpt("insecure")
162  .build());
163 
164  // Other database connection details
165  options.addOption(Option.builder("d")
166  .hasArg()
167  .desc("JDBC driver class")
168  .longOpt("driver")
169  .build());
170  options.addOption(Option.builder("c")
171  .hasArg()
172  .desc("JDBC Connection string")
173  .longOpt("jdbcConnect")
174  .required()
175  .build());
176  options.addOption(Option.builder("su")
177  .hasArg()
178  .desc("Source User")
179  .longOpt("sourceUser")
180  .required()
181  .build());
182  options.addOption(Option.builder("sp")
183  .hasArg()
184  .desc("Source Password")
185  .longOpt("sourcePasswd")
186  .required()
187  .build());
188  options.addOption(Option.builder("ss")
189  .hasArg()
190  .desc("SQL Select statement")
191  .longOpt("sqlStmt")
192  .required()
193  .build());
194 
195  options.addOption(Option.builder("t")
196  .hasArg()
197  .desc("OmniSci Target Table")
198  .longOpt("targetTable")
199  .required()
200  .build());
201 
202  options.addOption(Option.builder("b")
203  .hasArg()
204  .desc("transfer buffer size")
205  .longOpt("bufferSize")
206  .build());
207  options.addOption(Option.builder("f")
208  .hasArg()
209  .desc("table fragment size")
210  .longOpt("fragmentSize")
211  .build());
212 
213  options.addOption(Option.builder("tr")
214  .desc("Truncate table if it exists")
215  .longOpt("truncate")
216  .build());
217  options.addOption(Option.builder("i")
218  .hasArg()
219  .desc("File containing init command for DB")
220  .longOpt("initializeFile")
221  .build());
222  }
223 
224  private Option setOptionRequired(Option option) {
225  option.setRequired(true);
226  return option;
227  }
228 
229  public CommandLine parse(String[] args) throws ParseException {
230  CommandLineParser clp = new DefaultParser() {
231  public CommandLine parse(Options options, String[] strings) throws ParseException {
232  Options helpOptions = new Options();
233  helpOptions.addOption(
234  Option.builder("h").desc("help message").longOpt("help").build());
235  try {
236  CommandLine cmd = super.parse(helpOptions, strings);
237  } catch (UnrecognizedOptionException uE) {
238  }
239  if (cmd.hasOption("help")) {
240  printHelpMessage();
241  exit(0);
242  }
243  if (cmd.hasOption("version")) {
244  printVersion();
245  exit(0);
246  }
247  cmd = super.parse(options, strings);
248  if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
249  throw new MissingArgumentException(
250  "Must supply either an OmniSci db user or a user certificate");
251  }
252  // if user supplied must have password and visa versa
253  if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
254  options.addOption(setOptionRequired(options.getOption("user")));
255  options.addOption(setOptionRequired(options.getOption("passwd")));
256  super.parse(options, strings);
257  }
258 
259  // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
260  // and
261  // visa versa
262  if (false) {
263  if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
264  options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
265  options.addOption(
266  setOptionRequired(options.getOption("ca-trust-store-password")));
267  super.parse(options, strings);
268  }
269  if (options.getOption("user").isRequired()
270  && options.getOption("client-key").isRequired()) {
273  "user/password can not be use with client-cert/client-key",
274  strings);
275  throw meo;
276  }
277 
278  if (cmd.hasOption("http")
279  || cmd.hasOption("binary")
280  && (cmd.hasOption("client-cert")
281  || cmd.hasOption("client-key"))) {
283  "http|binary can not be use with ca-cert|client-cert|client-key",
284  strings);
285  }
286  }
287 
288  if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
290  "insecure can only be use with https", strings);
291  throw meo;
292  }
293 
294  return cmd;
295  }
296 
297  public CommandLine parse(Options options, String[] strings, boolean b)
298  throws ParseException {
299  return null;
300  }
301  };
302  return clp.parse(options, args);
303  }
304 }
305 
306 public class SQLImporter {
  // Active OmniSci session handle; null until createMapDConnection() succeeds.
  protected String session = null;
  // Thrift client used for all OmniSci server calls.
  protected MapD.Client client = null;
  // Parsed command line; set in doWork().
  private CommandLine cmd = null;
  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
  // Converts JDBC epoch-milliseconds into the epoch-seconds stored in OmniSci.
  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
    return milliseconds / 1000;
  };

  // Vendor-specific helpers (GIS types, WKT); set from the JDBC URL in executeQuery().
  Db_vendor_types vendor_types = null;

  /** Command-line entry point: parses arguments and runs the import. */
  public static void main(String[] args) {
    SQLImporter sq = new SQLImporter();
    sq.doWork(args);
  }
321 
322  void doWork(String[] args) {
323  // create Options object
324 
325  SQLImporter_args s_args = new SQLImporter_args();
326 
327  try {
328  cmd = s_args.parse(args);
329  } catch (ParseException ex) {
330  LOGGER.error(ex.getLocalizedMessage());
331  s_args.printHelpMessage();
332  exit(0);
333  }
334  executeQuery();
335  }
336 
  /**
   * Runs the configured source-database query and streams the result set into
   * the OmniSci target table in buffer-sized columnar batches.
   *
   * Flow: open the JDBC connection -> optionally run the init script ->
   * execute the select -> verify/create the target table against the result
   * metadata -> convert each row into Thrift TColumn buffers and ship a batch
   * to the server every bufferSize rows, plus one final partial batch.
   */
  void executeQuery() {
    Connection conn = null;
    Statement stmt = null;

    long totalTime = 0;

    try {
      // Open a connection
      LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
      conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
              cmd.getOptionValue("sourceUser"),
              cmd.getOptionValue("sourcePasswd"));
      // Vendor-specific helpers (GIS type names, WKT extraction) keyed off the URL.
      vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
      long startTime = System.currentTimeMillis();

      // run init file script on targe DB if present
      if (cmd.hasOption("initializeFile")) {
        run_init(conn);
      }

      // set autocommit off to allow postgress to not load all results
      try {
        conn.setAutoCommit(false);
      } catch (SQLException se) {
        LOGGER.warn(
                "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
                + se.toString());
      }

      // Execute a query
      stmt = conn.createStatement();

      int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
      // set the jdbc fetch buffer size to reduce the amount of records being moved to
      // java from postgress
      stmt.setFetchSize(bufferSize);
      long timer;

      ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));

      // check if table already exists and is compatible in OmniSci with the query
      // metadata
      ResultSetMetaData md = rs.getMetaData();
      checkMapDTable(conn, md);

      timer = System.currentTimeMillis();

      long resultCount = 0;
      int bufferCount = 0;
      long total = 0;

      // One Thrift column buffer per select-list column, pre-sized to the batch.
      List<TColumn> cols = new ArrayList(md.getColumnCount());
      for (int i = 1; i <= md.getColumnCount(); i++) {
        TColumn col = setupBinaryColumn(i, md, bufferSize);
        cols.add(col);
      }

      // read data from old DB
      while (rs.next()) {
        // JDBC columns are 1-based; cols list is 0-based.
        for (int i = 1; i <= md.getColumnCount(); i++) {
          setColValue(rs,
                  cols.get(i - 1),
                  md.getColumnType(i),
                  i,
                  md.getScale(i),
                  md.getColumnTypeName(i));
        }
        resultCount++;
        bufferCount++;
        if (bufferCount == bufferSize) {
          bufferCount = 0;
          // send the buffer to mapD
          client.load_table_binary_columnar(
                  session, cmd.getOptionValue("targetTable"), cols); // old
          // recreate columnar store for use
          for (int i = 1; i <= md.getColumnCount(); i++) {
            resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
          }

          if (resultCount % 100000 == 0) {
            LOGGER.info("Imported " + resultCount + " records");
          }
        }
      }
      if (bufferCount > 0) {
        // send the LAST buffer to mapD
        client.load_table_binary_columnar(
                session, cmd.getOptionValue("targetTable"), cols);
        bufferCount = 0;
      }
      LOGGER.info("result set count is " + resultCount + " read time is "
              + (System.currentTimeMillis() - timer) + "ms");

      // Clean-up environment
      rs.close();
      stmt.close();

      totalTime = System.currentTimeMillis() - startTime;
      conn.close();
    } catch (SQLException se) {
      LOGGER.error("SQLException - " + se.toString());
      se.printStackTrace();
    } catch (TMapDException ex) {
      LOGGER.error("TMapDException - " + ex.toString());
      ex.printStackTrace();
    } catch (TException ex) {
      LOGGER.error("TException failed - " + ex.toString());
      ex.printStackTrace();
    } finally {
      // finally block used to close resources
      try {
        if (stmt != null) {
          stmt.close();
        }
      } catch (SQLException se2) {
      } // nothing we can do
      try {
        if (conn != null) {
          conn.close();
        }
      } catch (SQLException se) {
        LOGGER.error("SQlException in close - " + se.toString());
        se.printStackTrace();
      } // end finally try
    } // end try
  }
463 
464  private void run_init(Connection conn) {
465  // attempt to open file
466  String line = "";
467  try {
468  BufferedReader reader =
469  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
470  Statement stmt = conn.createStatement();
471  while ((line = reader.readLine()) != null) {
472  if (line.isEmpty()) {
473  continue;
474  }
475  LOGGER.info("Running : " + line);
476  stmt.execute(line);
477  }
478  stmt.close();
479  reader.close();
480  } catch (IOException e) {
481  LOGGER.error("Exception occurred trying to read initialize file: "
482  + cmd.getOptionValue("initFile"));
483  exit(1);
484  } catch (SQLException e) {
485  LOGGER.error(
486  "Exception occurred trying to execute initialize file entry : " + line);
487  exit(1);
488  }
489  }
490 
  /**
   * Prints an auto-generated usage summary for the given option set.
   *
   * @param options options to describe
   */
  private void help(Options options) {
    // automatically generate the help statement
    HelpFormatter formatter = new HelpFormatter();
    formatter.setOptionComparator(null); // get options in the order they are created
    formatter.printHelp("SQLImporter", options);
  }
497 
  /**
   * Ensures the OmniSci target table exists and matches the shape of the
   * source query: creates the table when absent; with --truncate drops and
   * recreates it; otherwise requires the same column count and the same
   * column names (case-insensitive) in the same order, exiting on mismatch.
   *
   * NOTE(review): despite its name, the --truncate path issues DROP TABLE and
   * recreates, rather than TRUNCATE - confirm that is the intended behavior.
   *
   * @param otherdb_conn JDBC connection to the source database (used for
   *     vendor-specific GIS type lookups when creating the table)
   * @param md metadata of the source query's result set
   * @throws SQLException if the source metadata cannot be read
   */
  private void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
          throws SQLException {
    createMapDConnection();
    String tName = cmd.getOptionValue("targetTable");

    if (tableExists(tName)) {
      // check if we want to truncate
      if (cmd.hasOption("truncate")) {
        executeMapDCommand("Drop table " + tName);
        createMapDTable(otherdb_conn, md);
      } else {
        List<TColumnType> columnInfo = getColumnInfo(tName);
        // table exists lets check it has same number of columns

        if (md.getColumnCount() != columnInfo.size()) {
          LOGGER.error("Table sizes do not match - OmniSci " + columnInfo.size()
                  + " versus Select " + md.getColumnCount());
          exit(1);
        }
        // table exists lets check it is same layout - check names will do for now
        // Note weird start from 1 and reduce index by one is due to sql metatdata
        // beinging with 1 not 0
        for (int colNum = 1; colNum <= columnInfo.size(); colNum++) {
          if (!columnInfo.get(colNum - 1)
                          .col_name.equalsIgnoreCase(md.getColumnName(colNum))) {
            LOGGER.error(
                    "OmniSci Table does not have matching column in same order for column number"
                    + colNum + " OmniSci column name is "
                    + columnInfo.get(colNum - 1).col_name + " versus Select "
                    + md.getColumnName(colNum));
            exit(1);
          }
        }
      }
    } else {
      createMapDTable(otherdb_conn, md);
    }
  }
536 
  /**
   * Builds and executes a CREATE TABLE statement for the target table,
   * deriving each column's OmniSci type from the source query metadata.
   * Columns of java.sql.Types.OTHER are mapped through the vendor helper
   * (GIS types); a fragment_size clause is appended when -f/--fragmentSize
   * is positive. Exits the process on metadata errors.
   *
   * @param otherdb_conn source-database connection, used only for GIS lookups
   * @param metaData source query result metadata driving the column list
   */
  private void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData) {
    StringBuilder sb = new StringBuilder();
    sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");

    // Now iterate the metadata
    try {
      for (int i = 1; i <= metaData.getColumnCount(); i++) {
        if (i > 1) {
          sb.append(",");
        }
        LOGGER.debug("Column name is " + metaData.getColumnName(i));
        LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
        LOGGER.debug("Column type is " + metaData.getColumnType(i));

        sb.append(metaData.getColumnName(i)).append(" ");
        int col_type = metaData.getColumnType(i);
        if (col_type == java.sql.Types.OTHER) {
          // Vendor-specific geo type (e.g. PostGIS geometry) - ask the helper.
          sb.append(vendor_types.find_gis_type(otherdb_conn, metaData, i));
        } else {
          sb.append(getColType(metaData.getColumnType(i),
                  metaData.getPrecision(i),
                  metaData.getScale(i)));
        }
      }
      sb.append(")");

      if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
        sb.append(" with (fragment_size = ");
        sb.append(cmd.getOptionValue("fragmentSize", "0"));
        sb.append(")");
      }

    } catch (SQLException ex) {
      LOGGER.error("Error processing the metadata - " + ex.toString());
      exit(1);
    }

    executeMapDCommand(sb.toString());
  }
576 
577  private void createMapDConnection() {
578  TTransport transport = null;
579  TProtocol protocol = new TBinaryProtocol(transport);
580  int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
581  String server = cmd.getOptionValue("server", "localhost");
582  try {
583  // Uses default certificate stores.
584  boolean load_trust_store = cmd.hasOption("https");
585  SockTransportProperties skT = null;
586  if (cmd.hasOption("https")) {
588  !cmd.hasOption("insecure"));
589  transport = skT.openHttpsClientTransport(server, port);
590  transport.open();
591  protocol = new TJSONProtocol(transport);
592  } else if (cmd.hasOption("http")) {
594  transport = skT.openHttpClientTransport(server, port);
595  protocol = new TJSONProtocol(transport);
596  } else {
598  transport = skT.openClientTransport(server, port);
599  transport.open();
600  protocol = new TBinaryProtocol(transport);
601  }
602 
603  client = new MapD.Client(protocol);
604  // This if will be useless until PKI signon
605  if (cmd.hasOption("user")) {
606  session = client.connect(cmd.getOptionValue("user", "admin"),
607  cmd.getOptionValue("passwd", "HyperInteractive"),
608  cmd.getOptionValue("database", "omnisci"));
609  }
610  LOGGER.debug("Connected session is " + session);
611 
612  } catch (TTransportException ex) {
613  LOGGER.error("Connection failed - " + ex.toString());
614  exit(1);
615  } catch (TMapDException ex) {
616  LOGGER.error("Connection failed - " + ex.toString());
617  exit(2);
618  } catch (TException ex) {
619  LOGGER.error("Connection failed - " + ex.toString());
620  exit(3);
621  } catch (Exception ex) {
622  LOGGER.error("General exception - " + ex.toString());
623  exit(4);
624  }
625  }
626 
  /**
   * Fetches the column descriptors (row descriptor) of an existing OmniSci
   * table. Exits the process on any server/communication error.
   *
   * @param tName table whose layout is wanted
   * @return the table's column name/type list
   */
  private List<TColumnType> getColumnInfo(String tName) {
    LOGGER.debug("Getting columns for " + tName);
    List<TColumnType> row_descriptor = null;
    try {
      TTableDetails table_details = client.get_table_details(session, tName);
      row_descriptor = table_details.row_desc;
    } catch (TMapDException ex) {
      LOGGER.error("column check failed - " + ex.toString());
      exit(3);
    } catch (TException ex) {
      LOGGER.error("column check failed - " + ex.toString());
      exit(3);
    }
    return row_descriptor;
  }
642 
643  private boolean tableExists(String tName) {
644  LOGGER.debug("Check for table " + tName);
645  try {
646  List<String> recv_get_tables = client.get_tables(session);
647  for (String s : recv_get_tables) {
648  if (s.equals(tName)) {
649  return true;
650  }
651  }
652  } catch (TMapDException ex) {
653  LOGGER.error("Table check failed - " + ex.toString());
654  exit(3);
655  } catch (TException ex) {
656  LOGGER.error("Table check failed - " + ex.toString());
657  exit(3);
658  }
659  return false;
660  }
661 
662  private void executeMapDCommand(String sql) {
663  LOGGER.info(" run comamnd :" + sql);
664 
665  try {
666  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
667  } catch (TMapDException ex) {
668  LOGGER.error("SQL Execute failed - " + ex.toString());
669  exit(1);
670  } catch (TException ex) {
671  LOGGER.error("SQL Execute failed - " + ex.toString());
672  exit(1);
673  }
674  }
675 
  /**
   * Maps a java.sql.Types code to the OmniSci column type used in the
   * generated CREATE TABLE. DECIMAL/NUMERIC keep precision and scale; all
   * character types become dictionary-encoded TEXT.
   *
   * @param cType java.sql.Types constant from the source metadata
   * @param precision numeric precision (clamped to 19)
   * @param scale numeric scale
   * @return OmniSci type expression
   * @throws AssertionError for unsupported JDBC types
   */
  private String getColType(int cType, int precision, int scale) {
    // Note - if cType is OTHER a earlier call will have been made
    // to try and work out the db vendors specific type.
    if (precision > 19) {
      precision = 19;
    }
    // NOTE(review): asymmetric clamp - a scale of exactly 19 passes through
    // while larger values become 18; confirm whether the test should be
    // scale > 18.
    if (scale > 19) {
      scale = 18;
    }
    switch (cType) {
      case java.sql.Types.TINYINT:
        return ("TINYINT");
      case java.sql.Types.SMALLINT:
        return ("SMALLINT");
      case java.sql.Types.INTEGER:
        return ("INTEGER");
      case java.sql.Types.BIGINT:
        return ("BIGINT");
      case java.sql.Types.FLOAT:
        return ("FLOAT");
      case java.sql.Types.DECIMAL:
        return ("DECIMAL(" + precision + "," + scale + ")");
      case java.sql.Types.DOUBLE:
        return ("DOUBLE");
      case java.sql.Types.REAL:
        return ("REAL");
      case java.sql.Types.NUMERIC:
        return ("NUMERIC(" + precision + "," + scale + ")");
      case java.sql.Types.TIME:
        return ("TIME");
      case java.sql.Types.TIMESTAMP:
        return ("TIMESTAMP");
      case java.sql.Types.DATE:
        return ("DATE");
      case java.sql.Types.BOOLEAN:
      case java.sql.Types
              .BIT: // deal with postgress treating boolean as bit... this will bite me
        return ("BOOLEAN");
      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        return ("TEXT ENCODING DICT");
      default:
        throw new AssertionError("Column type " + cType + " not Supported");
    }
  }
725 
  /**
   * Allocates an empty Thrift TColumn whose backing list matches the storage
   * class of the JDBC column: int_col for integers, booleans, date/time and
   * fixed-point types; real_col for floating point; str_col for text and geo
   * (OTHER) columns.
   *
   * @param i 1-based column index in the result metadata
   * @param md source query metadata
   * @param bufferSize batch size used to pre-size the buffers
   * @return a TColumn ready to accumulate up to bufferSize values
   * @throws SQLException if the column type cannot be read
   * @throws AssertionError for unsupported JDBC types
   */
  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
          throws SQLException {
    TColumn col = new TColumn();

    col.nulls = new ArrayList<Boolean>(bufferSize);

    col.data = new TColumnData();

    switch (md.getColumnType(i)) {
      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
      case java.sql.Types.TIME:
      case java.sql.Types.TIMESTAMP:
      case java.sql.Types
              .BIT: // deal with postgress treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
      case java.sql.Types.DATE:
      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        col.data.int_col = new ArrayList<Long>(bufferSize);
        break;

      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        col.data.real_col = new ArrayList<Double>(bufferSize);
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
      case java.sql.Types.OTHER:
        col.data.str_col = new ArrayList<String>(bufferSize);
        break;

      default:
        throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
    }
    return col;
  }
771 
772  private void setColValue(ResultSet rs,
773  TColumn col,
774  int columnType,
775  int colNum,
776  int scale,
777  String colTypeName) throws SQLException {
778  switch (columnType) {
779  case java.sql.Types
780  .BIT: // deal with postgress treating boolean as bit... this will bite me
781  case java.sql.Types.BOOLEAN:
782  Boolean b = rs.getBoolean(colNum);
783  if (rs.wasNull()) {
784  col.nulls.add(Boolean.TRUE);
785  col.data.int_col.add(0L);
786  } else {
787  col.nulls.add(Boolean.FALSE);
788  col.data.int_col.add(b ? 1L : 0L);
789  }
790  break;
791 
792  case java.sql.Types.DECIMAL:
793  case java.sql.Types.NUMERIC:
794  BigDecimal bd = rs.getBigDecimal(colNum);
795  if (rs.wasNull()) {
796  col.nulls.add(Boolean.TRUE);
797  col.data.int_col.add(0L);
798  } else {
799  col.nulls.add(Boolean.FALSE);
800  col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
801  }
802  break;
803 
804  case java.sql.Types.TINYINT:
805  case java.sql.Types.SMALLINT:
806  case java.sql.Types.INTEGER:
807  case java.sql.Types.BIGINT:
808  Long l = rs.getLong(colNum);
809  if (rs.wasNull()) {
810  col.nulls.add(Boolean.TRUE);
811  col.data.int_col.add(new Long(0));
812  } else {
813  col.nulls.add(Boolean.FALSE);
814  col.data.int_col.add(l);
815  }
816  break;
817 
818  case java.sql.Types.TIME:
819  Time t = rs.getTime(colNum);
820  if (rs.wasNull()) {
821  col.nulls.add(Boolean.TRUE);
822  col.data.int_col.add(0L);
823 
824  } else {
825  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
826  col.nulls.add(Boolean.FALSE);
827  }
828 
829  break;
830  case java.sql.Types.TIMESTAMP:
831  Timestamp ts = rs.getTimestamp(colNum);
832  if (rs.wasNull()) {
833  col.nulls.add(Boolean.TRUE);
834  col.data.int_col.add(0L);
835 
836  } else {
837  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
838  col.nulls.add(Boolean.FALSE);
839  }
840 
841  break;
842  case java.sql.Types.DATE:
843  Date d = rs.getDate(colNum);
844  if (rs.wasNull()) {
845  col.nulls.add(Boolean.TRUE);
846  col.data.int_col.add(0L);
847 
848  } else {
849  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
850  col.nulls.add(Boolean.FALSE);
851  }
852  break;
853  case java.sql.Types.FLOAT:
854  case java.sql.Types.DOUBLE:
855  case java.sql.Types.REAL:
856  Double db = rs.getDouble(colNum);
857  if (rs.wasNull()) {
858  col.nulls.add(Boolean.TRUE);
859  col.data.real_col.add(new Double(0));
860 
861  } else {
862  col.nulls.add(Boolean.FALSE);
863  col.data.real_col.add(db);
864  }
865  break;
866 
867  case java.sql.Types.NVARCHAR:
868  case java.sql.Types.VARCHAR:
869  case java.sql.Types.NCHAR:
870  case java.sql.Types.CHAR:
871  case java.sql.Types.LONGVARCHAR:
872  case java.sql.Types.LONGNVARCHAR:
873  String strVal = rs.getString(colNum);
874  if (rs.wasNull()) {
875  col.nulls.add(Boolean.TRUE);
876  col.data.str_col.add("");
877 
878  } else {
879  col.data.str_col.add(strVal);
880  col.nulls.add(Boolean.FALSE);
881  }
882  break;
883  case java.sql.Types.OTHER:
884  if (rs.wasNull()) {
885  col.nulls.add(Boolean.TRUE);
886  col.data.str_col.add("");
887  } else {
888  col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
889  col.nulls.add(Boolean.FALSE);
890  }
891  break;
892  default:
893  throw new AssertionError("Column type " + columnType + " not Supported");
894  }
895  }
896 
897  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
898  throws SQLException {
899  col.nulls.clear();
900 
901  switch (md.getColumnType(i)) {
902  case java.sql.Types.TINYINT:
903  case java.sql.Types.SMALLINT:
904  case java.sql.Types.INTEGER:
905  case java.sql.Types.BIGINT:
906  case java.sql.Types.TIME:
907  case java.sql.Types.TIMESTAMP:
908  case java.sql.Types
909  .BIT: // deal with postgress treating boolean as bit... this will bite me
910  case java.sql.Types.BOOLEAN:
911  case java.sql.Types.DATE:
912  case java.sql.Types.DECIMAL:
913  case java.sql.Types.NUMERIC:
914  col.data.int_col.clear();
915  break;
916 
917  case java.sql.Types.FLOAT:
918  case java.sql.Types.DOUBLE:
919  case java.sql.Types.REAL:
920  col.data.real_col.clear();
921  break;
922 
923  case java.sql.Types.NVARCHAR:
924  case java.sql.Types.VARCHAR:
925  case java.sql.Types.NCHAR:
926  case java.sql.Types.CHAR:
927  case java.sql.Types.LONGVARCHAR:
928  case java.sql.Types.LONGNVARCHAR:
929  col.data.str_col.clear();
930  break;
931 
932  // Handle WKT for geo columns
933  case java.sql.Types.OTHER:
934  col.data.str_col.clear();
935  break;
936 
937  default:
938  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
939  }
940  }
941 }
static com.mapd.utility.db_vendors.Db_vendor_types Db_vendor_factory(String connection_str)
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
String getColType(int cType, int precision, int scale)
void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
TTransport openHttpClientTransport(String server_host, int port)
TTransport openHttpsClientTransport(String server_host, int port)
void help(Options options)
TTransport openClientTransport(String server_host, int port)
static void main(String[] args)
abstract String get_wkt(ResultSet rs, int column_number, String gis_type_name)
void doWork(String[] args)
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
size_t append(FILE *f, const size_t size, int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:135
void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData)
abstract String find_gis_type(Connection conn, ResultSetMetaData metadata, int column_number)
static SockTransportProperties getUnencryptedClient()
boolean tableExists(String tName)
long getSecondsFromMilliseconds(long milliseconds)
void executeMapDCommand(String sql)
static SockTransportProperties getEncryptedClientDefaultTrustStore(boolean validate_server_name)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)