OmniSciDB  04ee39c94c
SQLImporter.java
Go to the documentation of this file.
1 /*
2  * Copyright 2017 MapD Technologies, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.utility;
17 
18 import static java.lang.Math.pow;
19 import static java.lang.System.exit;
20 
22 import com.mapd.thrift.server.MapD;
23 import com.mapd.thrift.server.TColumn;
24 import com.mapd.thrift.server.TColumnData;
25 import com.mapd.thrift.server.TColumnType;
26 import com.mapd.thrift.server.TMapDException;
27 import com.mapd.thrift.server.TQueryResult;
28 import com.mapd.thrift.server.TTableDetails;
30 
31 import org.apache.commons.cli.*;
32 import org.apache.thrift.TException;
33 import org.apache.thrift.protocol.TBinaryProtocol;
34 import org.apache.thrift.protocol.TJSONProtocol;
35 import org.apache.thrift.protocol.TProtocol;
36 import org.apache.thrift.transport.TSocket;
37 import org.apache.thrift.transport.TTransport;
38 import org.apache.thrift.transport.TTransportException;
39 import org.slf4j.Logger;
40 import org.slf4j.LoggerFactory;
41 
42 import java.io.BufferedReader;
43 import java.io.FileReader;
44 import java.io.IOException;
45 import java.math.BigDecimal;
46 import java.security.KeyStore;
47 import java.sql.*;
48 import java.time.*;
49 import java.util.ArrayList;
50 import java.util.List;
51 
/**
 * Converts epoch milliseconds to epoch seconds. Declared as a functional
 * interface so it can be supplied as a lambda (see the dateTimeUtils field
 * in SQLImporter).
 */
@FunctionalInterface
interface DateTimeUtils {
  /**
   * @param milliseconds an epoch timestamp in milliseconds
   * @return the same instant expressed in whole seconds
   */
  long getSecondsFromMilliseconds(long milliseconds);
}
55 
// Thrown when command-line options that cannot be combined are both supplied
// (e.g. user/password auth together with client-cert auth).
// NOTE(review): "Exlusive" is a typo for "Exclusive" in the class name; left
// as-is because renaming would break callers.
// NOTE(review): the constructor declaration (original line 57, presumably
// "private MutuallyExlusiveOptionsException(String message) {") is elided in
// this doxygen listing; the super(message) call below belongs to it — confirm
// against the original source.
class MutuallyExlusiveOptionsException extends ParseException {
  super(message);
  }

  // Builds an exception whose message lists the offending options, e.g.
  // "Mutually exclusive options used. <errMsg>. Options provided [a b]".
  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
    StringBuffer sb = new StringBuffer(
            "Mutually exclusive options used. " + errMsg + ". Options provided [");
    for (String s : strings) {
      sb.append(s);
      sb.append(" ");
    }
    // Replace the trailing space with the closing bracket.
    sb.setCharAt(sb.length() - 1, ']');
    return new MutuallyExlusiveOptionsException(sb.toString());
  }
}
72 
  // Registry of every command-line option accepted by SQLImporter; populated
  // by the option-building code below and consumed by parse().
  private Options options = new Options();
75 
76  void printVersion() {
77  System.out.println("SQLImporter Version 4.6.0");
78  }
79 
  // Builds the usage banner shown by -h/--help and on argument errors, then
  // prints it followed by the option descriptions in declaration order.
  // NOTE(review): the method declaration (original line 80, presumably
  // "void printHelpMessage() {") is elided in this doxygen listing.
  StringBuffer sb = new StringBuffer("\nSQLImporter ");
  // Ready for PKI auth
  // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
  sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
  sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
  // sb.append("([--ca-trust-store <ca trust store file name>]
  // --ca-trust-store-password
  // <trust store password> | --insecure)\n");
  sb.append(
          "[-d <other database JDBC drive class>] -c <other database JDBC connection string>\n");
  // NOTE(review): the second "-su" below should almost certainly read "-ss"
  // (the SQL statement option); user-facing text left unchanged in this
  // documentation-only pass.
  sb.append(
          "-su <other database user> -sp <other database user password> -su <other database sql statement>\n");
  sb.append(
          "-t <OmniSci target table> -b <transfer buffer size> -f <table fragment size>\n");
  sb.append("[-tr] -i <init commands file>\n");
  sb.append("\nSQLImporter -h | --help\n\n");

  HelpFormatter formatter = new HelpFormatter();
  // Forces help to print out options in order they were added rather
  // than in alphabetical order
  formatter.setOptionComparator(null);
  int help_width = 100;
  formatter.printHelp(help_width, sb.toString(), "", options, "");
  }
105 
  // Defines every command-line option accepted by SQLImporter: OmniSci
  // authentication and transport, OmniSci server location, TLS trust-store
  // settings, source-database JDBC details, and import tuning flags.
  // NOTE(review): the enclosing declaration (original line 106, presumably the
  // SQLImporter_args constructor) is elided in this doxygen listing — confirm
  // against the original source.
  options.addOption("r", true, "Row Load Limit");

  // OmniSci authentication options
  options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
  options.addOption(
          Option.builder("u").hasArg().desc("OmniSci User").longOpt("user").build());
  options.addOption(Option.builder("p")
                            .hasArg()
                            .desc("OmniSci Password")
                            .longOpt("passwd")
                            .build());
  // OmniSci transport options: binary, http and https are mutually exclusive.
  OptionGroup transport_grp = new OptionGroup();
  transport_grp.addOption(Option.builder()
                                  .desc("use binary transport to connect to OmniSci ")
                                  .longOpt("binary")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use http transport to connect to OmniSci ")
                                  .longOpt("http")
                                  .build());
  transport_grp.addOption(Option.builder()
                                  .desc("use https transport to connect to OmniSci ")
                                  .longOpt("https")
                                  .build());
  options.addOptionGroup(transport_grp);

  // OmniSci database server details
  options.addOption(Option.builder("s")
                            .hasArg()
                            .desc("OmniSci Server")
                            .longOpt("server")
                            .build());
  options.addOption(Option.builder("db")
                            .hasArg()
                            .desc("OmniSci Database")
                            .longOpt("database")
                            .build());
  options.addOption(
          Option.builder().hasArg().desc("OmniSci Port").longOpt("port").build());

  // OmniSci server authentication options
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store")
                            .longOpt("ca-trust-store")
                            .build());
  options.addOption(Option.builder()
                            .hasArg()
                            .desc("CA certificate trust store password")
                            .longOpt("ca-trust-store-passwd")
                            .build());
  // NOTE(review): "Inseure" is a typo for "Insecure" in this user-facing
  // description; left unchanged in this documentation-only pass.
  options.addOption(
          Option.builder()
                  .desc("Inseure TLS - do not validate server OmniSci server credentials")
                  .longOpt("insecure")
                  .build());

  // Other database connection details
  options.addOption(Option.builder("d")
                            .hasArg()
                            .desc("JDBC driver class")
                            .longOpt("driver")
                            .build());
  options.addOption(Option.builder("c")
                            .hasArg()
                            .desc("JDBC Connection string")
                            .longOpt("jdbcConnect")
                            .required()
                            .build());
  options.addOption(Option.builder("su")
                            .hasArg()
                            .desc("Source User")
                            .longOpt("sourceUser")
                            .required()
                            .build());
  options.addOption(Option.builder("sp")
                            .hasArg()
                            .desc("Source Password")
                            .longOpt("sourcePasswd")
                            .required()
                            .build());
  options.addOption(Option.builder("ss")
                            .hasArg()
                            .desc("SQL Select statement")
                            .longOpt("sqlStmt")
                            .required()
                            .build());

  options.addOption(Option.builder("t")
                            .hasArg()
                            .desc("OmniSci Target Table")
                            .longOpt("targetTable")
                            .required()
                            .build());

  // Import tuning: rows per columnar batch and OmniSci fragment size.
  options.addOption(Option.builder("b")
                            .hasArg()
                            .desc("transfer buffer size")
                            .longOpt("bufferSize")
                            .build());
  options.addOption(Option.builder("f")
                            .hasArg()
                            .desc("table fragment size")
                            .longOpt("fragmentSize")
                            .build());

  options.addOption(Option.builder("tr")
                            .desc("Truncate table if it exists")
                            .longOpt("truncate")
                            .build());
  options.addOption(Option.builder("i")
                            .hasArg()
                            .desc("File containing init command for DB")
                            .longOpt("initializeFile")
                            .build());
  }
224 
  // Marks the given commons-cli Option as required and returns the same
  // instance so it can be re-added to an Options set in a single expression.
  // Note: this mutates the shared Option object held by the Options registry.
  private Option setOptionRequired(Option option) {
    option.setRequired(true);
    return option;
  }
229 
  // Parses the command line. Handles -h/--help and --version before the full
  // parse (so help works even when required options are missing), then
  // enforces cross-option rules that commons-cli cannot express
  // declaratively: user/password pairing and --insecure requiring --https.
  public CommandLine parse(String[] args) throws ParseException {
    CommandLineParser clp = new DefaultParser() {
      public CommandLine parse(Options options, String[] strings) throws ParseException {
        // Help-only option set for the pre-parse.
        Options helpOptions = new Options();
        helpOptions.addOption(
                Option.builder("h").desc("help message").longOpt("help").build());
        try {
          // NOTE(review): as shown, cmd is scoped to this try block yet used
          // below — the original source presumably declares cmd before the
          // try; this doxygen listing appears to have dropped that line.
          CommandLine cmd = super.parse(helpOptions, strings);
        } catch (UnrecognizedOptionException uE) {
          // Deliberately ignored: unknown options at this stage are
          // re-validated by the full parse below.
        }
        if (cmd.hasOption("help")) {
          printHelpMessage();
          exit(0);
        }
        if (cmd.hasOption("version")) {
          printVersion();
          exit(0);
        }
        cmd = super.parse(options, strings);
        if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
          throw new MissingArgumentException(
                  "Must supply either an OmniSci db user or a user certificate");
        }
        // if user supplied must have password and visa versa
        if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
          options.addOption(setOptionRequired(options.getOption("user")));
          options.addOption(setOptionRequired(options.getOption("passwd")));
          super.parse(options, strings);
        }

        // FUTURE USE FOR USER Auth if user client-cert supplied must have client-key
        // and
        // visa versa
        if (false) { // dead code intentionally retained for future PKI signon
          if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
            options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
            options.addOption(
                    setOptionRequired(options.getOption("ca-trust-store-password")));
            super.parse(options, strings);
          }
          if (options.getOption("user").isRequired()
                  && options.getOption("client-key").isRequired()) {
            // NOTE(review): the construction of meo (original lines 272-273,
            // presumably "MutuallyExlusiveOptionsException meo =
            // MutuallyExlusiveOptionsException.create(") is elided in this
            // doxygen listing.
                    "user/password can not be use with client-cert/client-key",
                    strings);
            throw meo;
          }

          if (cmd.hasOption("http")
                  || cmd.hasOption("binary")
                          && (cmd.hasOption("client-cert")
                                  || cmd.hasOption("client-key"))) {
            // NOTE(review): the exception construction (original line 283) is
            // elided here; note also that no throw follows, and the mixed
            // &&/|| precedence looks suspect — confirm intent against the
            // original source.
                    "http|binary can not be use with ca-cert|client-cert|client-key",
                    strings);
          }
        }

        if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
          // NOTE(review): the exception construction (original line 290) is
          // elided in this doxygen listing.
                  "insecure can only be use with https", strings);
          throw meo;
        }

        return cmd;
      }

      // Three-argument overload required by the CommandLineParser contract in
      // some versions; never invoked here and returns null unconditionally.
      public CommandLine parse(Options options, String[] strings, boolean b)
              throws ParseException {
        return null;
      }
    };
    return clp.parse(options, args);
  }
305 }
306 
307 public class SQLImporter {
  // Session token returned by client.connect(); null until a login succeeds.
  protected String session = null;
  // Thrift client for the OmniSci server; created in createMapDConnection().
  protected MapD.Client client = null;
  // Parsed command-line options; populated in doWork().
  private CommandLine cmd = null;
  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
  // Converts epoch milliseconds to epoch seconds (truncating division).
  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
    return milliseconds / 1000;
  };

  // Vendor-specific helpers (GIS type lookup, WKT extraction); selected from
  // the JDBC connection string in executeQuery().
  Db_vendor_types vendor_types = null;
317 
318  public static void main(String[] args) {
319  SQLImporter sq = new SQLImporter();
320  sq.doWork(args);
321  }
322 
323  void doWork(String[] args) {
324  // create Options object
325 
326  SQLImporter_args s_args = new SQLImporter_args();
327 
328  try {
329  cmd = s_args.parse(args);
330  } catch (ParseException ex) {
331  LOGGER.error(ex.getLocalizedMessage());
332  s_args.printHelpMessage();
333  exit(0);
334  }
335  executeQuery();
336  }
337 
  // Runs the end-to-end import: connects to the source database via JDBC,
  // optionally runs an init script, executes the user's SELECT, creates or
  // validates the OmniSci target table, then streams the result set to the
  // server in columnar batches of bufferSize rows.
  void executeQuery() {
    Connection conn = null;
    Statement stmt = null;

    // NOTE(review): totalTime is computed but never reported; `total` below is
    // likewise unused.
    long totalTime = 0;

    try {
      // Open a connection to the source (non-OmniSci) database.
      LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
      conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
              cmd.getOptionValue("sourceUser"),
              cmd.getOptionValue("sourcePasswd"));
      vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
      long startTime = System.currentTimeMillis();

      // run init file script on targe DB if present
      if (cmd.hasOption("initializeFile")) {
        run_init(conn);
      }

      // set autocommit off to allow postgress to not load all results
      try {
        conn.setAutoCommit(false);
      } catch (SQLException se) {
        LOGGER.warn(
                "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
                + se.toString());
      }

      // Execute a query
      stmt = conn.createStatement();

      int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
      // set the jdbc fetch buffer size to reduce the amount of records being moved to
      // java from postgress
      stmt.setFetchSize(bufferSize);
      long timer;

      ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));

      // check if table already exists and is compatible in OmniSci with the query
      // metadata
      ResultSetMetaData md = rs.getMetaData();
      checkMapDTable(conn, md);

      timer = System.currentTimeMillis();

      long resultCount = 0;
      int bufferCount = 0;
      long total = 0;

      // One TColumn buffer per select-list column, preallocated to bufferSize
      // rows. NOTE(review): raw ArrayList — should be new ArrayList<>(…).
      List<TColumn> cols = new ArrayList(md.getColumnCount());
      for (int i = 1; i <= md.getColumnCount(); i++) {
        TColumn col = setupBinaryColumn(i, md, bufferSize);
        cols.add(col);
      }

      // read data from old DB
      while (rs.next()) {
        // JDBC column indices are 1-based; cols is 0-based.
        for (int i = 1; i <= md.getColumnCount(); i++) {
          setColValue(rs,
                  cols.get(i - 1),
                  md.getColumnType(i),
                  i,
                  md.getScale(i),
                  md.getColumnTypeName(i));
        }
        resultCount++;
        bufferCount++;
        if (bufferCount == bufferSize) {
          bufferCount = 0;
          // send the buffer to mapD
          client.load_table_binary_columnar(
                  session, cmd.getOptionValue("targetTable"), cols); // old
          // recreate columnar store for use
          for (int i = 1; i <= md.getColumnCount(); i++) {
            resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
          }

          // Progress logging only fires when the row count lands exactly on a
          // multiple of 100000 at a buffer boundary.
          if (resultCount % 100000 == 0) {
            LOGGER.info("Imported " + resultCount + " records");
          }
        }
      }
      if (bufferCount > 0) {
        // send the LAST buffer to mapD
        client.load_table_binary_columnar(
                session, cmd.getOptionValue("targetTable"), cols);
        bufferCount = 0;
      }
      LOGGER.info("result set count is " + resultCount + " read time is "
              + (System.currentTimeMillis() - timer) + "ms");

      // Clean-up environment
      rs.close();
      stmt.close();

      totalTime = System.currentTimeMillis() - startTime;
      conn.close();
    } catch (SQLException se) {
      LOGGER.error("SQLException - " + se.toString());
      se.printStackTrace();
    } catch (TMapDException ex) {
      LOGGER.error("TMapDException - " + ex.toString());
      ex.printStackTrace();
    } catch (TException ex) {
      LOGGER.error("TException failed - " + ex.toString());
      ex.printStackTrace();
    } finally {
      // finally block used to close resources; stmt/conn may already be
      // closed by the happy path above, in which case close() is a no-op.
      try {
        if (stmt != null) {
          stmt.close();
        }
      } catch (SQLException se2) {
      } // nothing we can do
      try {
        if (conn != null) {
          conn.close();
        }
      } catch (SQLException se) {
        LOGGER.error("SQlException in close - " + se.toString());
        se.printStackTrace();
      } // end finally try
    } // end try
  }
464 
465  private void run_init(Connection conn) {
466  // attempt to open file
467  String line = "";
468  try {
469  BufferedReader reader =
470  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
471  Statement stmt = conn.createStatement();
472  while ((line = reader.readLine()) != null) {
473  if (line.isEmpty()) {
474  continue;
475  }
476  LOGGER.info("Running : " + line);
477  stmt.execute(line);
478  }
479  stmt.close();
480  reader.close();
481  } catch (IOException e) {
482  LOGGER.error("Exception occurred trying to read initialize file: "
483  + cmd.getOptionValue("initFile"));
484  exit(1);
485  } catch (SQLException e) {
486  LOGGER.error(
487  "Exception occurred trying to execute initialize file entry : " + line);
488  exit(1);
489  }
490  }
491 
492  private void help(Options options) {
493  // automatically generate the help statement
494  HelpFormatter formatter = new HelpFormatter();
495  formatter.setOptionComparator(null); // get options in the order they are created
496  formatter.printHelp("SQLImporter", options);
497  }
498 
  // Ensures the OmniSci target table exists and is compatible with the query
  // metadata: creates it if absent; with --truncate, drops and recreates it
  // (note: implemented as DROP + CREATE, not SQL TRUNCATE, so the schema is
  // rebuilt too); otherwise validates column count and per-position column
  // names, exiting the process on any mismatch. Also establishes the OmniSci
  // connection as a side effect.
  private void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
          throws SQLException {
    createMapDConnection();
    String tName = cmd.getOptionValue("targetTable");

    if (tableExists(tName)) {
      // check if we want to truncate
      if (cmd.hasOption("truncate")) {
        executeMapDCommand("Drop table " + tName);
        createMapDTable(otherdb_conn, md);
      } else {
        List<TColumnType> columnInfo = getColumnInfo(tName);
        // table exists lets check it has same number of columns

        if (md.getColumnCount() != columnInfo.size()) {
          LOGGER.error("Table sizes do not match - OmniSci " + columnInfo.size()
                  + " versus Select " + md.getColumnCount());
          exit(1);
        }
        // table exists lets check it is same layout - check names will do for now
        // Note weird start from 1 and reduce index by one is due to sql metatdata
        // beinging with 1 not 0
        for (int colNum = 1; colNum <= columnInfo.size(); colNum++) {
          if (!columnInfo.get(colNum - 1)
                          .col_name.equalsIgnoreCase(md.getColumnName(colNum))) {
            LOGGER.error(
                    "OmniSci Table does not have matching column in same order for column number"
                    + colNum + " OmniSci column name is "
                    + columnInfo.get(colNum - 1).col_name + " versus Select "
                    + md.getColumnName(colNum));
            exit(1);
          }
        }
      }
    } else {
      createMapDTable(otherdb_conn, md);
    }
  }
537 
  // Builds and executes a CREATE TABLE statement for the OmniSci target table
  // from the source query's ResultSetMetaData. Columns of java.sql.Types.OTHER
  // are mapped through the vendor helper (e.g. PostGIS geometry types);
  // everything else goes through getColType(). Appends a fragment_size clause
  // when -f/--fragmentSize was supplied with a positive value.
  private void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData) {
    StringBuilder sb = new StringBuilder();
    sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");

    // Now iterate the metadata
    try {
      for (int i = 1; i <= metaData.getColumnCount(); i++) {
        if (i > 1) {
          sb.append(",");
        }
        LOGGER.debug("Column name is " + metaData.getColumnName(i));
        LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
        LOGGER.debug("Column type is " + metaData.getColumnType(i));

        sb.append(metaData.getColumnName(i)).append(" ");
        int col_type = metaData.getColumnType(i);
        if (col_type == java.sql.Types.OTHER) {
          // Vendor-specific lookup for types JDBC cannot describe generically.
          sb.append(vendor_types.find_gis_type(otherdb_conn,
                  metaData.getColumnName(i),
                  metaData.getColumnTypeName(i)));
        } else {
          sb.append(getColType(metaData.getColumnType(i),
                  metaData.getPrecision(i),
                  metaData.getScale(i)));
        }
      }
      sb.append(")");

      if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
        sb.append(" with (fragment_size = ");
        sb.append(cmd.getOptionValue("fragmentSize", "0"));
        sb.append(")");
      }

    } catch (SQLException ex) {
      LOGGER.error("Error processing the metadata - " + ex.toString());
      exit(1);
    }

    executeMapDCommand(sb.toString());
  }
579 
  // Opens a Thrift connection to the OmniSci server using the transport
  // selected on the command line (binary by default, or --http/--https),
  // logs in when a user was supplied, and stores the session token in
  // `session`. Exits with a distinct status code per failure category
  // (1 transport, 2 server-side, 3 other thrift, 4 anything else).
  private void createMapDConnection() {
    TTransport transport = null;
    // NOTE(review): this initial protocol wraps a null transport; it is
    // unconditionally replaced in every branch below before first use.
    TProtocol protocol = new TBinaryProtocol(transport);
    int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
    String server = cmd.getOptionValue("server", "localhost");
    try {
      // Uses default certificate stores.
      boolean load_trust_store = cmd.hasOption("https");
      SockTransportProperties skT = null;
      if (cmd.hasOption("https")) {
        // --insecure disables trust-store validation. Non-short-circuit & is
        // harmless here (both operands are side-effect-free booleans).
        skT = new SockTransportProperties(load_trust_store & !cmd.hasOption("insecure"));
        transport = skT.openHttpsClientTransport(server, port);
        transport.open();
        protocol = new TJSONProtocol(transport);
      } else if (cmd.hasOption("http")) {
        skT = new SockTransportProperties(load_trust_store);
        transport = skT.openHttpClientTransport(server, port);
        // NOTE(review): unlike the other branches, transport.open() is not
        // called here — presumably the HTTP client transport opens lazily;
        // confirm against SockTransportProperties.
        protocol = new TJSONProtocol(transport);
      } else {
        skT = new SockTransportProperties(load_trust_store);
        transport = skT.openClientTransport(server, port);
        transport.open();
        protocol = new TBinaryProtocol(transport);
      }

      client = new MapD.Client(protocol);
      // This if will be useless until PKI signon
      if (cmd.hasOption("user")) {
        session = client.connect(cmd.getOptionValue("user", "admin"),
                cmd.getOptionValue("passwd", "HyperInteractive"),
                cmd.getOptionValue("database", "omnisci"));
      }
      LOGGER.debug("Connected session is " + session);

    } catch (TTransportException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(1);
    } catch (TMapDException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(2);
    } catch (TException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(3);
    } catch (Exception ex) {
      LOGGER.error("General exception - " + ex.toString());
      exit(4);
    }
  }
628 
629  private List<TColumnType> getColumnInfo(String tName) {
630  LOGGER.debug("Getting columns for " + tName);
631  List<TColumnType> row_descriptor = null;
632  try {
633  TTableDetails table_details = client.get_table_details(session, tName);
634  row_descriptor = table_details.row_desc;
635  } catch (TMapDException ex) {
636  LOGGER.error("column check failed - " + ex.toString());
637  exit(3);
638  } catch (TException ex) {
639  LOGGER.error("column check failed - " + ex.toString());
640  exit(3);
641  }
642  return row_descriptor;
643  }
644 
645  private boolean tableExists(String tName) {
646  LOGGER.debug("Check for table " + tName);
647  try {
648  List<String> recv_get_tables = client.get_tables(session);
649  for (String s : recv_get_tables) {
650  if (s.equals(tName)) {
651  return true;
652  }
653  }
654  } catch (TMapDException ex) {
655  LOGGER.error("Table check failed - " + ex.toString());
656  exit(3);
657  } catch (TException ex) {
658  LOGGER.error("Table check failed - " + ex.toString());
659  exit(3);
660  }
661  return false;
662  }
663 
664  private void executeMapDCommand(String sql) {
665  LOGGER.info(" run comamnd :" + sql);
666 
667  try {
668  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
669  } catch (TMapDException ex) {
670  LOGGER.error("SQL Execute failed - " + ex.toString());
671  exit(1);
672  } catch (TException ex) {
673  LOGGER.error("SQL Execute failed - " + ex.toString());
674  exit(1);
675  }
676  }
677 
678  private String getColType(int cType, int precision, int scale) {
679  // Note - if cType is OTHER a earlier call will have been made
680  // to try and work out the db vendors specific type.
681  if (precision > 19) {
682  precision = 19;
683  }
684  if (scale > 19) {
685  scale = 18;
686  }
687  switch (cType) {
688  case java.sql.Types.TINYINT:
689  return ("TINYINT");
690  case java.sql.Types.SMALLINT:
691  return ("SMALLINT");
692  case java.sql.Types.INTEGER:
693  return ("INTEGER");
694  case java.sql.Types.BIGINT:
695  return ("BIGINT");
696  case java.sql.Types.FLOAT:
697  return ("FLOAT");
698  case java.sql.Types.DECIMAL:
699  return ("DECIMAL(" + precision + "," + scale + ")");
700  case java.sql.Types.DOUBLE:
701  return ("DOUBLE");
702  case java.sql.Types.REAL:
703  return ("REAL");
704  case java.sql.Types.NUMERIC:
705  return ("NUMERIC(" + precision + "," + scale + ")");
706  case java.sql.Types.TIME:
707  return ("TIME");
708  case java.sql.Types.TIMESTAMP:
709  return ("TIMESTAMP");
710  case java.sql.Types.DATE:
711  return ("DATE");
712  case java.sql.Types.BOOLEAN:
713  case java.sql.Types
714  .BIT: // deal with postgress treating boolean as bit... this will bite me
715  return ("BOOLEAN");
716  case java.sql.Types.NVARCHAR:
717  case java.sql.Types.VARCHAR:
718  case java.sql.Types.NCHAR:
719  case java.sql.Types.CHAR:
720  case java.sql.Types.LONGVARCHAR:
721  case java.sql.Types.LONGNVARCHAR:
722  return ("TEXT ENCODING DICT");
723  default:
724  throw new AssertionError("Column type " + cType + " not Supported");
725  }
726  }
727 
728  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
729  throws SQLException {
730  TColumn col = new TColumn();
731 
732  col.nulls = new ArrayList<Boolean>(bufferSize);
733 
734  col.data = new TColumnData();
735 
736  switch (md.getColumnType(i)) {
737  case java.sql.Types.TINYINT:
738  case java.sql.Types.SMALLINT:
739  case java.sql.Types.INTEGER:
740  case java.sql.Types.BIGINT:
741  case java.sql.Types.TIME:
742  case java.sql.Types.TIMESTAMP:
743  case java.sql.Types
744  .BIT: // deal with postgress treating boolean as bit... this will bite me
745  case java.sql.Types.BOOLEAN:
746  case java.sql.Types.DATE:
747  case java.sql.Types.DECIMAL:
748  case java.sql.Types.NUMERIC:
749  col.data.int_col = new ArrayList<Long>(bufferSize);
750  break;
751 
752  case java.sql.Types.FLOAT:
753  case java.sql.Types.DOUBLE:
754  case java.sql.Types.REAL:
755  col.data.real_col = new ArrayList<Double>(bufferSize);
756  break;
757 
758  case java.sql.Types.NVARCHAR:
759  case java.sql.Types.VARCHAR:
760  case java.sql.Types.NCHAR:
761  case java.sql.Types.CHAR:
762  case java.sql.Types.LONGVARCHAR:
763  case java.sql.Types.LONGNVARCHAR:
764  case java.sql.Types.OTHER:
765  col.data.str_col = new ArrayList<String>(bufferSize);
766  break;
767 
768  default:
769  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
770  }
771  return col;
772  }
773 
  // Appends the value of select-list column colNum from the current row of rs
  // to the matching TColumn buffer. NULLs are recorded in col.nulls with a
  // type-appropriate placeholder in the data list. Temporal types are stored
  // as epoch seconds; DECIMAL/NUMERIC values are scaled to integers.
  private void setColValue(ResultSet rs,
          TColumn col,
          int columnType,
          int colNum,
          int scale,
          String colTypeName) throws SQLException {
    switch (columnType) {
      case java.sql.Types
              .BIT: // deal with postgress treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
        Boolean b = rs.getBoolean(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(b ? 1L : 0L);
        }
        break;

      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        BigDecimal bd = rs.getBigDecimal(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          // Shift the decimal point right by `scale` digits and truncate to a
          // long. NOTE(review): pow() routes through double, which can lose
          // precision for very large values/scales — confirm acceptable.
          col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
        }
        break;

      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
        Long l = rs.getLong(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          // NOTE(review): new Long(0) is deprecated; 0L would autobox.
          col.data.int_col.add(new Long(0));
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(l);
        }
        break;

      case java.sql.Types.TIME:
        Time t = rs.getTime(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          // Stored as epoch seconds (truncating milliseconds).
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
          col.nulls.add(Boolean.FALSE);
        }

        break;
      case java.sql.Types.TIMESTAMP:
        Timestamp ts = rs.getTimestamp(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
          col.nulls.add(Boolean.FALSE);
        }

        break;
      case java.sql.Types.DATE:
        Date d = rs.getDate(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);

        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
          col.nulls.add(Boolean.FALSE);
        }
        break;
      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        Double db = rs.getDouble(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.real_col.add(new Double(0));

        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.real_col.add(db);
        }
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        String strVal = rs.getString(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");

        } else {
          col.data.str_col.add(strVal);
          col.nulls.add(Boolean.FALSE);
        }
        break;
      case java.sql.Types.OTHER:
        // Vendor-specific (e.g. GIS) values are converted to WKT strings.
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");
        } else {
          col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
          col.nulls.add(Boolean.FALSE);
        }
        break;
      default:
        throw new AssertionError("Column type " + columnType + " not Supported");
    }
  }
898 
899  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
900  throws SQLException {
901  col.nulls.clear();
902 
903  switch (md.getColumnType(i)) {
904  case java.sql.Types.TINYINT:
905  case java.sql.Types.SMALLINT:
906  case java.sql.Types.INTEGER:
907  case java.sql.Types.BIGINT:
908  case java.sql.Types.TIME:
909  case java.sql.Types.TIMESTAMP:
910  case java.sql.Types
911  .BIT: // deal with postgress treating boolean as bit... this will bite me
912  case java.sql.Types.BOOLEAN:
913  case java.sql.Types.DATE:
914  case java.sql.Types.DECIMAL:
915  case java.sql.Types.NUMERIC:
916  col.data.int_col.clear();
917  break;
918 
919  case java.sql.Types.FLOAT:
920  case java.sql.Types.DOUBLE:
921  case java.sql.Types.REAL:
922  col.data.real_col.clear();
923  break;
924 
925  case java.sql.Types.NVARCHAR:
926  case java.sql.Types.VARCHAR:
927  case java.sql.Types.NCHAR:
928  case java.sql.Types.CHAR:
929  case java.sql.Types.LONGVARCHAR:
930  case java.sql.Types.LONGNVARCHAR:
931  col.data.str_col.clear();
932  break;
933 
934  default:
935  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
936  }
937  }
938 }
static com.mapd.utility.db_vendors.Db_vendor_types Db_vendor_factory(String connection_str)
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
Option setOptionRequired(Option option)
void d(const SQLTypes expected_type, const std::string &str)
Definition: ImportTest.cpp:268
String getColType(int cType, int precision, int scale)
void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
TTransport openHttpClientTransport(String server_host, int port)
TTransport openHttpsClientTransport(String server_host, int port)
auto sql(const std::string &sql_stmts)
void help(Options options)
TTransport openClientTransport(String server_host, int port)
static void main(String[] args)
abstract String get_wkt(ResultSet rs, int column_number, String gis_type_name)
void doWork(String[] args)
mapd::shared_ptr< MapDClient > client
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
List< TColumnType > getColumnInfo(String tName)
CommandLine parse(String[] args)
size_t append(FILE *f, const size_t size, int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:136
void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData)
boolean tableExists(String tName)
abstract String find_gis_type(Connection conn, String source_column_name, String source_column_type_name)
long getSecondsFromMilliseconds(long milliseconds)
void executeMapDCommand(String sql)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
TSessionId session
static MutuallyExlusiveOptionsException create(String errMsg, String[] strings)
void run_init(Connection conn)