SQLImporter.java
/*
 * Copyright 2017 MapD Technologies, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.mapd.utility;

import static java.lang.Math.pow;
import static java.lang.System.exit;

import com.mapd.common.SockTransportProperties; // package path assumed; class is referenced below
import com.mapd.thrift.server.MapD;
import com.mapd.thrift.server.TColumn;
import com.mapd.thrift.server.TColumnData;
import com.mapd.thrift.server.TColumnType;
import com.mapd.thrift.server.TMapDException;
import com.mapd.thrift.server.TQueryResult;
import com.mapd.thrift.server.TTableDetails;
import com.mapd.utility.db_vendors.Db_vendor_types; // package path assumed; class is referenced below

import org.apache.commons.cli.*;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TJSONProtocol;
import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
import org.apache.thrift.transport.TTransportException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.math.BigDecimal;
import java.security.KeyStore;
import java.sql.*;
import java.time.*;
import java.util.ArrayList;
import java.util.List;
interface DateTimeUtils {
  long getSecondsFromMilliseconds(long milliseconds);
}

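// ParseException subclass raised when command line options that cannot be
// combined (for example --insecure without --https) are supplied together.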
class MutuallyExlusiveOptionsException extends ParseException {
  MutuallyExlusiveOptionsException(String message) {
    super(message);
  }

  public static MutuallyExlusiveOptionsException create(String errMsg, String[] strings) {
    StringBuffer sb = new StringBuffer(
            "Mutually exclusive options used. " + errMsg + ". Options provided [");
    for (String s : strings) {
      sb.append(s);
      sb.append(" ");
    }
    sb.setCharAt(sb.length() - 1, ']');
    return new MutuallyExlusiveOptionsException(sb.toString());
  }
}

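// Parses and validates the SQLImporter command line. Option definitions live in
// the constructor; parse() layers extra cross-option checks (help/version
// handling, user/password pairing, transport restrictions) on top of
// commons-cli's DefaultParser.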
class SQLImporter_args {
  private Options options = new Options();

  void printVersion() {
    System.out.println("SQLImporter Version 4.6.0");
  }

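  // Prints the usage banner followed by the full option list, in the order the
  // options were declared.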
  void printHelpMessage() {
    StringBuffer sb = new StringBuffer("\nSQLImporter ");
    // Ready for PKI auth
    // sb.append("(-u <userid> -p <password> | --client-cert <key store filename>
    sb.append("-u <userid> -p <password> [(--binary|--http|--https [--insecure])]\n");
    sb.append("-s <omnisci server host> -db <omnisci db> --port <omnisci server port>\n");
    // sb.append("([--ca-trust-store <ca trust store file name>]
    // --ca-trust-store-password
    // <trust store password> | --insecure)\n");
    sb.append(
            "[-d <other database JDBC driver class>] -c <other database JDBC connection string>\n");
    sb.append(
            "-su <other database user> -sp <other database user password> -ss <other database sql statement>\n");
    sb.append(
            "-t <OmniSci target table> -b <transfer buffer size> -f <table fragment size>\n");
    sb.append("[-tr] -i <init commands file>\n");
    sb.append("\nSQLImporter -h | --help\n\n");

    HelpFormatter formatter = new HelpFormatter();
    // Forces help to print out options in the order they were added rather
    // than in alphabetical order
    formatter.setOptionComparator(null);
    int help_width = 100;
    formatter.printHelp(help_width, sb.toString(), "", options, "");
  }

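  // Declares every command line option: OmniSci credentials and transport,
  // OmniSci server/database details, source-database JDBC settings, and the
  // target table / buffering / truncate / init-file controls.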
  SQLImporter_args() {
    options.addOption("r", true, "Row Load Limit");

    // OmniSci authentication options
    options.addOption(Option.builder("h").desc("help message").longOpt("help").build());
    options.addOption(
            Option.builder("u").hasArg().desc("OmniSci User").longOpt("user").build());
    options.addOption(Option.builder("p")
                              .hasArg()
                              .desc("OmniSci Password")
                              .longOpt("passwd")
                              .build());
    // OmniSci transport options
    OptionGroup transport_grp = new OptionGroup();
    transport_grp.addOption(Option.builder()
                                    .desc("use binary transport to connect to OmniSci ")
                                    .longOpt("binary")
                                    .build());
    transport_grp.addOption(Option.builder()
                                    .desc("use http transport to connect to OmniSci ")
                                    .longOpt("http")
                                    .build());
    transport_grp.addOption(Option.builder()
                                    .desc("use https transport to connect to OmniSci ")
                                    .longOpt("https")
                                    .build());
    options.addOptionGroup(transport_grp);

    // OmniSci database server details
    options.addOption(Option.builder("s")
                              .hasArg()
                              .desc("OmniSci Server")
                              .longOpt("server")
                              .build());
    options.addOption(Option.builder("db")
                              .hasArg()
                              .desc("OmniSci Database")
                              .longOpt("database")
                              .build());
    options.addOption(
            Option.builder().hasArg().desc("OmniSci Port").longOpt("port").build());

    // OmniSci server authentication options
    options.addOption(Option.builder()
                              .hasArg()
                              .desc("CA certificate trust store")
                              .longOpt("ca-trust-store")
                              .build());
    options.addOption(Option.builder()
                              .hasArg()
                              .desc("CA certificate trust store password")
                              .longOpt("ca-trust-store-passwd")
                              .build());
    options.addOption(
            Option.builder()
                    .desc("Insecure TLS - do not validate OmniSci server credentials")
                    .longOpt("insecure")
                    .build());

    // Other database connection details
    options.addOption(Option.builder("d")
                              .hasArg()
                              .desc("JDBC driver class")
                              .longOpt("driver")
                              .build());
    options.addOption(Option.builder("c")
                              .hasArg()
                              .desc("JDBC Connection string")
                              .longOpt("jdbcConnect")
                              .required()
                              .build());
    options.addOption(Option.builder("su")
                              .hasArg()
                              .desc("Source User")
                              .longOpt("sourceUser")
                              .required()
                              .build());
    options.addOption(Option.builder("sp")
                              .hasArg()
                              .desc("Source Password")
                              .longOpt("sourcePasswd")
                              .required()
                              .build());
    options.addOption(Option.builder("ss")
                              .hasArg()
                              .desc("SQL Select statement")
                              .longOpt("sqlStmt")
                              .required()
                              .build());

    options.addOption(Option.builder("t")
                              .hasArg()
                              .desc("OmniSci Target Table")
                              .longOpt("targetTable")
                              .required()
                              .build());

    options.addOption(Option.builder("b")
                              .hasArg()
                              .desc("transfer buffer size")
                              .longOpt("bufferSize")
                              .build());
    options.addOption(Option.builder("f")
                              .hasArg()
                              .desc("table fragment size")
                              .longOpt("fragmentSize")
                              .build());

    options.addOption(Option.builder("tr")
                              .desc("Truncate table if it exists")
                              .longOpt("truncate")
                              .build());
    options.addOption(Option.builder("i")
                              .hasArg()
                              .desc("File containing init command for DB")
                              .longOpt("initializeFile")
                              .build());
  }

  private Option setOptionRequired(Option option) {
    option.setRequired(true);
    return option;
  }

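  // Parses the command line with an anonymous DefaultParser that first checks
  // for --help/--version (so they work even when required options are absent)
  // and then enforces the option combinations that commons-cli cannot express
  // on its own.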
  public CommandLine parse(String[] args) throws ParseException {
    CommandLineParser clp = new DefaultParser() {
      public CommandLine parse(Options options, String[] strings) throws ParseException {
        Options helpOptions = new Options();
        helpOptions.addOption(
                Option.builder("h").desc("help message").longOpt("help").build());
        CommandLine cmd = null;
        try {
          cmd = super.parse(helpOptions, strings);
        } catch (UnrecognizedOptionException uE) {
        }
        if (cmd != null && cmd.hasOption("help")) {
          printHelpMessage();
          exit(0);
        }
        if (cmd != null && cmd.hasOption("version")) {
          printVersion();
          exit(0);
        }
        cmd = super.parse(options, strings);
        if (!cmd.hasOption("user") && !cmd.hasOption("client-cert")) {
          throw new MissingArgumentException(
                  "Must supply either an OmniSci db user or a user certificate");
        }
        // if user is supplied a password must be as well, and vice versa
        if (cmd.hasOption("user") || cmd.hasOption("passwd")) {
          options.addOption(setOptionRequired(options.getOption("user")));
          options.addOption(setOptionRequired(options.getOption("passwd")));
          super.parse(options, strings);
        }

        // FUTURE USE FOR USER Auth: if client-cert is supplied, client-key must
        // be as well, and vice versa
        if (false) {
          if (cmd.hasOption("client-cert") || cmd.hasOption("client-key")) {
            options.addOption(setOptionRequired(options.getOption("ca-trust-store")));
            options.addOption(
                    setOptionRequired(options.getOption("ca-trust-store-password")));
            super.parse(options, strings);
          }
          if (options.getOption("user").isRequired()
                  && options.getOption("client-key").isRequired()) {
            MutuallyExlusiveOptionsException meo =
                    MutuallyExlusiveOptionsException.create(
                            "user/password cannot be used with client-cert/client-key",
                            strings);
            throw meo;
          }

          if (cmd.hasOption("http")
                  || cmd.hasOption("binary")
                          && (cmd.hasOption("client-cert")
                                  || cmd.hasOption("client-key"))) {
            MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                    "http|binary cannot be used with ca-cert|client-cert|client-key",
                    strings);
            throw meo;
          }
        }

        if (cmd.hasOption("insecure") && !cmd.hasOption("https")) {
          MutuallyExlusiveOptionsException meo = MutuallyExlusiveOptionsException.create(
                  "insecure can only be used with https", strings);
          throw meo;
        }

        return cmd;
      }

      public CommandLine parse(Options options, String[] strings, boolean b)
              throws ParseException {
        return null;
      }
    };
    return clp.parse(options, args);
  }
}

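/**
 * SQLImporter streams the result set of a SQL SELECT run against another
 * database (over JDBC) into an OmniSci table via Thrift. It creates or
 * validates the target table, batches rows into columnar buffers of
 * bufferSize records, and sends each batch with load_table_binary_columnar.
 */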
public class SQLImporter {
  protected String session = null;
  protected MapD.Client client = null;
  private CommandLine cmd = null;
  final static Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class);
  private DateTimeUtils dateTimeUtils = (milliseconds) -> {
    return milliseconds / 1000;
  };

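  // Vendor-specific helpers (GIS column typing and WKT extraction) selected
  // from the JDBC connection string in executeQuery().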
  Db_vendor_types vendor_types = null;

  public static void main(String[] args) {
    SQLImporter sq = new SQLImporter();
    sq.doWork(args);
  }

  void doWork(String[] args) {
    // parse the command line arguments
    SQLImporter_args s_args = new SQLImporter_args();

    try {
      cmd = s_args.parse(args);
    } catch (ParseException ex) {
      LOGGER.error(ex.getLocalizedMessage());
      s_args.printHelpMessage();
      exit(0);
    }
    executeQuery();
  }

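  // Core import loop: connects to the source database over JDBC, runs the
  // SELECT, verifies/creates the OmniSci target table, then streams the result
  // set to OmniSci in columnar batches of bufferSize rows.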
  void executeQuery() {
    Connection conn = null;
    Statement stmt = null;

    long totalTime = 0;

    try {
      // Open a connection
      LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
      conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
              cmd.getOptionValue("sourceUser"),
              cmd.getOptionValue("sourcePasswd"));
      vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
      long startTime = System.currentTimeMillis();

      // run init file script on target DB if present
      if (cmd.hasOption("initializeFile")) {
        run_init(conn);
      }

      // set autocommit off to allow postgres to not load all results
      try {
        conn.setAutoCommit(false);
      } catch (SQLException se) {
        LOGGER.warn(
                "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesn't support it. Error is "
                + se.toString());
      }

      // Execute a query
      stmt = conn.createStatement();

      int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
      // set the jdbc fetch buffer size to reduce the amount of records being moved to
      // java from postgres
      stmt.setFetchSize(bufferSize);
      long timer;

      ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));

      // check if table already exists and is compatible in OmniSci with the query
      // metadata
      ResultSetMetaData md = rs.getMetaData();
      checkMapDTable(conn, md);

      timer = System.currentTimeMillis();

      long resultCount = 0;
      int bufferCount = 0;
      long total = 0;

      List<TColumn> cols = new ArrayList<>(md.getColumnCount());
      for (int i = 1; i <= md.getColumnCount(); i++) {
        TColumn col = setupBinaryColumn(i, md, bufferSize);
        cols.add(col);
      }

      // read data from old DB
      while (rs.next()) {
        for (int i = 1; i <= md.getColumnCount(); i++) {
          setColValue(rs,
                  cols.get(i - 1),
                  md.getColumnType(i),
                  i,
                  md.getScale(i),
                  md.getColumnTypeName(i));
        }
        resultCount++;
        bufferCount++;
        if (bufferCount == bufferSize) {
          bufferCount = 0;
          // send the buffer to mapD
          client.load_table_binary_columnar(
                  session, cmd.getOptionValue("targetTable"), cols);
          // recreate columnar store for reuse
          for (int i = 1; i <= md.getColumnCount(); i++) {
            resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
          }

          if (resultCount % 100000 == 0) {
            LOGGER.info("Imported " + resultCount + " records");
          }
        }
      }
      if (bufferCount > 0) {
        // send the LAST buffer to mapD
        client.load_table_binary_columnar(
                session, cmd.getOptionValue("targetTable"), cols);
        bufferCount = 0;
      }
      LOGGER.info("result set count is " + resultCount + " read time is "
              + (System.currentTimeMillis() - timer) + "ms");

      // Clean-up environment
      rs.close();
      stmt.close();

      totalTime = System.currentTimeMillis() - startTime;
      conn.close();
    } catch (SQLException se) {
      LOGGER.error("SQLException - " + se.toString());
      se.printStackTrace();
    } catch (TMapDException ex) {
      LOGGER.error("TMapDException - " + ex.toString());
      ex.printStackTrace();
    } catch (TException ex) {
      LOGGER.error("TException failed - " + ex.toString());
      ex.printStackTrace();
    } finally {
      // finally block used to close resources
      try {
        if (stmt != null) {
          stmt.close();
        }
      } catch (SQLException se2) {
      } // nothing we can do
      try {
        if (conn != null) {
          conn.close();
        }
      } catch (SQLException se) {
        LOGGER.error("SQLException in close - " + se.toString());
        se.printStackTrace();
      } // end finally try
    } // end try
  }

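  // Runs each non-empty line of the --initializeFile script as a statement on
  // the source database connection before the main query is executed.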
  private void run_init(Connection conn) {
    // attempt to open file
    String line = "";
    try {
      BufferedReader reader =
              new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
      Statement stmt = conn.createStatement();
      while ((line = reader.readLine()) != null) {
        if (line.isEmpty()) {
          continue;
        }
        LOGGER.info("Running : " + line);
        stmt.execute(line);
      }
      stmt.close();
      reader.close();
    } catch (IOException e) {
      LOGGER.error("Exception occurred trying to read initialize file: "
              + cmd.getOptionValue("initializeFile"));
      exit(1);
    } catch (SQLException e) {
      LOGGER.error(
              "Exception occurred trying to execute initialize file entry : " + line);
      exit(1);
    }
  }

  private void help(Options options) {
    // automatically generate the help statement
    HelpFormatter formatter = new HelpFormatter();
    formatter.setOptionComparator(null); // get options in the order they are created
    formatter.printHelp("SQLImporter", options);
  }

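  // Connects to OmniSci and makes sure the target table is usable: if it does
  // not exist it is created from the query metadata; if it exists and
  // --truncate is set it is dropped and recreated; otherwise its column count
  // and column names must match the SELECT, or the importer exits.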
  private void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
          throws SQLException {
    // connect to OmniSci first - client and session are needed by the checks below
    createMapDConnection();
    String tName = cmd.getOptionValue("targetTable");

    if (tableExists(tName)) {
      // check if we want to truncate
      if (cmd.hasOption("truncate")) {
        executeMapDCommand("Drop table " + tName);
        createMapDTable(otherdb_conn, md);
      } else {
        List<TColumnType> columnInfo = getColumnInfo(tName);
        // table exists, check that it has the same number of columns

        if (md.getColumnCount() != columnInfo.size()) {
          LOGGER.error("Table sizes do not match - OmniSci " + columnInfo.size()
                  + " versus Select " + md.getColumnCount());
          exit(1);
        }
        // table exists, check that it has the same layout - checking names will do for now
        // Note: the loop starts from 1 and the index is reduced by one because sql
        // metadata begins with 1, not 0
        for (int colNum = 1; colNum <= columnInfo.size(); colNum++) {
          if (!columnInfo.get(colNum - 1)
                          .col_name.equalsIgnoreCase(md.getColumnName(colNum))) {
            LOGGER.error(
                    "OmniSci Table does not have matching column in same order for column number "
                    + colNum + " OmniSci column name is "
                    + columnInfo.get(colNum - 1).col_name + " versus Select "
                    + md.getColumnName(colNum));
            exit(1);
          }
        }
      }
    } else {
      createMapDTable(otherdb_conn, md);
    }
  }

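  // Builds and runs a CREATE TABLE statement for the target table from the
  // JDBC result set metadata, mapping each source column type to an OmniSci
  // type and adding a fragment_size clause when -f is supplied.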
  private void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData) {
    StringBuilder sb = new StringBuilder();
    sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");

    // Now iterate the metadata
    try {
      for (int i = 1; i <= metaData.getColumnCount(); i++) {
        if (i > 1) {
          sb.append(",");
        }
        LOGGER.debug("Column name is " + metaData.getColumnName(i));
        LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
        LOGGER.debug("Column type is " + metaData.getColumnType(i));

        sb.append(metaData.getColumnName(i)).append(" ");
        int col_type = metaData.getColumnType(i);
        if (col_type == java.sql.Types.OTHER) {
          sb.append(vendor_types.find_gis_type(otherdb_conn, metaData, i));
        } else {
          sb.append(getColType(metaData.getColumnType(i),
                  metaData.getPrecision(i),
                  metaData.getScale(i)));
        }
      }
      sb.append(")");

      if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
        sb.append(" with (fragment_size = ");
        sb.append(cmd.getOptionValue("fragmentSize", "0"));
        sb.append(")");
      }

    } catch (SQLException ex) {
      LOGGER.error("Error processing the metadata - " + ex.toString());
      exit(1);
    }

    executeMapDCommand(sb.toString());
  }

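  // Opens a Thrift connection to the OmniSci server using the requested
  // transport (binary, http, or https, optionally without certificate
  // validation) and, when a user is supplied, stores the session handle.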
  private void createMapDConnection() {
    TTransport transport = null;
    TProtocol protocol = new TBinaryProtocol(transport);
    int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
    String server = cmd.getOptionValue("server", "localhost");
    try {
      // Uses default certificate stores.
      boolean load_trust_store = cmd.hasOption("https");
      SockTransportProperties skT = null;
      if (cmd.hasOption("https")) {
        skT = new SockTransportProperties(load_trust_store && !cmd.hasOption("insecure"));
        transport = skT.openHttpsClientTransport(server, port);
        transport.open();
        protocol = new TJSONProtocol(transport);
      } else if (cmd.hasOption("http")) {
        skT = new SockTransportProperties(load_trust_store);
        transport = skT.openHttpClientTransport(server, port);
        protocol = new TJSONProtocol(transport);
      } else {
        skT = new SockTransportProperties(load_trust_store);
        transport = skT.openClientTransport(server, port);
        transport.open();
        protocol = new TBinaryProtocol(transport);
      }

      client = new MapD.Client(protocol);
      // This if will be useless until PKI signon
      if (cmd.hasOption("user")) {
        session = client.connect(cmd.getOptionValue("user", "admin"),
                cmd.getOptionValue("passwd", "HyperInteractive"),
                cmd.getOptionValue("database", "omnisci"));
      }
      LOGGER.debug("Connected session is " + session);

    } catch (TTransportException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(1);
    } catch (TMapDException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(2);
    } catch (TException ex) {
      LOGGER.error("Connection failed - " + ex.toString());
      exit(3);
    } catch (Exception ex) {
      LOGGER.error("General exception - " + ex.toString());
      exit(4);
    }
  }

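  // Fetches the row descriptor (column metadata) of an existing OmniSci table.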
  private List<TColumnType> getColumnInfo(String tName) {
    LOGGER.debug("Getting columns for " + tName);
    List<TColumnType> row_descriptor = null;
    try {
      TTableDetails table_details = client.get_table_details(session, tName);
      row_descriptor = table_details.row_desc;
    } catch (TMapDException ex) {
      LOGGER.error("column check failed - " + ex.toString());
      exit(3);
    } catch (TException ex) {
      LOGGER.error("column check failed - " + ex.toString());
      exit(3);
    }
    return row_descriptor;
  }

  private boolean tableExists(String tName) {
    LOGGER.debug("Check for table " + tName);
    try {
      List<String> recv_get_tables = client.get_tables(session);
      for (String s : recv_get_tables) {
        if (s.equals(tName)) {
          return true;
        }
      }
    } catch (TMapDException ex) {
      LOGGER.error("Table check failed - " + ex.toString());
      exit(3);
    } catch (TException ex) {
      LOGGER.error("Table check failed - " + ex.toString());
      exit(3);
    }
    return false;
  }

  private void executeMapDCommand(String sql) {
    LOGGER.info("run command: " + sql);

    try {
      TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
    } catch (TMapDException ex) {
      LOGGER.error("SQL Execute failed - " + ex.toString());
      exit(1);
    } catch (TException ex) {
      LOGGER.error("SQL Execute failed - " + ex.toString());
      exit(1);
    }
  }

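  // Maps a java.sql.Types code (plus precision/scale for decimals) to the
  // OmniSci column type used in the generated CREATE TABLE statement.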
  private String getColType(int cType, int precision, int scale) {
    // Note - if cType is OTHER an earlier call will have been made
    // to try and work out the db vendor's specific type.
    if (precision > 19) {
      precision = 19;
    }
    if (scale > 19) {
      scale = 18;
    }
    switch (cType) {
      case java.sql.Types.TINYINT:
        return ("TINYINT");
      case java.sql.Types.SMALLINT:
        return ("SMALLINT");
      case java.sql.Types.INTEGER:
        return ("INTEGER");
      case java.sql.Types.BIGINT:
        return ("BIGINT");
      case java.sql.Types.FLOAT:
        return ("FLOAT");
      case java.sql.Types.DECIMAL:
        return ("DECIMAL(" + precision + "," + scale + ")");
      case java.sql.Types.DOUBLE:
        return ("DOUBLE");
      case java.sql.Types.REAL:
        return ("REAL");
      case java.sql.Types.NUMERIC:
        return ("NUMERIC(" + precision + "," + scale + ")");
      case java.sql.Types.TIME:
        return ("TIME");
      case java.sql.Types.TIMESTAMP:
        return ("TIMESTAMP");
      case java.sql.Types.DATE:
        return ("DATE");
      case java.sql.Types.BOOLEAN:
      case java.sql.Types
              .BIT: // deal with postgres treating boolean as bit... this will bite me
        return ("BOOLEAN");
      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        return ("TEXT ENCODING DICT");
      default:
        throw new AssertionError("Column type " + cType + " not Supported");
    }
  }

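  // Allocates the TColumn buffer for column i of the result set, choosing the
  // int_col, real_col, or str_col payload according to the JDBC column type.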
  private TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
          throws SQLException {
    TColumn col = new TColumn();

    col.nulls = new ArrayList<Boolean>(bufferSize);

    col.data = new TColumnData();

    switch (md.getColumnType(i)) {
      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
      case java.sql.Types.TIME:
      case java.sql.Types.TIMESTAMP:
      case java.sql.Types
              .BIT: // deal with postgres treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
      case java.sql.Types.DATE:
      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        col.data.int_col = new ArrayList<Long>(bufferSize);
        break;

      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        col.data.real_col = new ArrayList<Double>(bufferSize);
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
      case java.sql.Types.OTHER:
        col.data.str_col = new ArrayList<String>(bufferSize);
        break;

      default:
        throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
    }
    return col;
  }

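  // Appends the value of column colNum of the current JDBC row to the matching
  // TColumn buffer, recording SQL NULLs in the parallel nulls list (with a
  // placeholder value in the data list).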
  private void setColValue(ResultSet rs,
          TColumn col,
          int columnType,
          int colNum,
          int scale,
          String colTypeName) throws SQLException {
    switch (columnType) {
      case java.sql.Types
              .BIT: // deal with postgres treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
        Boolean b = rs.getBoolean(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(b ? 1L : 0L);
        }
        break;

      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        BigDecimal bd = rs.getBigDecimal(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
        }
        break;

      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
        Long l = rs.getLong(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.int_col.add(l);
        }
        break;

      case java.sql.Types.TIME:
        Time t = rs.getTime(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
          col.nulls.add(Boolean.FALSE);
        }
        break;

      case java.sql.Types.TIMESTAMP:
        Timestamp ts = rs.getTimestamp(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
          col.nulls.add(Boolean.FALSE);
        }
        break;

      case java.sql.Types.DATE:
        Date d = rs.getDate(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.int_col.add(0L);
        } else {
          col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
          col.nulls.add(Boolean.FALSE);
        }
        break;

      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        Double db = rs.getDouble(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.real_col.add(0d);
        } else {
          col.nulls.add(Boolean.FALSE);
          col.data.real_col.add(db);
        }
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        String strVal = rs.getString(colNum);
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");
        } else {
          col.data.str_col.add(strVal);
          col.nulls.add(Boolean.FALSE);
        }
        break;

      case java.sql.Types.OTHER:
        if (rs.wasNull()) {
          col.nulls.add(Boolean.TRUE);
          col.data.str_col.add("");
        } else {
          col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
          col.nulls.add(Boolean.FALSE);
        }
        break;

      default:
        throw new AssertionError("Column type " + columnType + " not Supported");
    }
  }

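  // Clears the nulls and data buffers of a TColumn after a batch has been sent,
  // so the same column objects can be reused for the next batch.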
  private void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
          throws SQLException {
    col.nulls.clear();

    switch (md.getColumnType(i)) {
      case java.sql.Types.TINYINT:
      case java.sql.Types.SMALLINT:
      case java.sql.Types.INTEGER:
      case java.sql.Types.BIGINT:
      case java.sql.Types.TIME:
      case java.sql.Types.TIMESTAMP:
      case java.sql.Types
              .BIT: // deal with postgres treating boolean as bit... this will bite me
      case java.sql.Types.BOOLEAN:
      case java.sql.Types.DATE:
      case java.sql.Types.DECIMAL:
      case java.sql.Types.NUMERIC:
        col.data.int_col.clear();
        break;

      case java.sql.Types.FLOAT:
      case java.sql.Types.DOUBLE:
      case java.sql.Types.REAL:
        col.data.real_col.clear();
        break;

      case java.sql.Types.NVARCHAR:
      case java.sql.Types.VARCHAR:
      case java.sql.Types.NCHAR:
      case java.sql.Types.CHAR:
      case java.sql.Types.LONGVARCHAR:
      case java.sql.Types.LONGNVARCHAR:
        col.data.str_col.clear();
        break;

      // Handle WKT for geo columns
      case java.sql.Types.OTHER:
        col.data.str_col.clear();
        break;

      default:
        throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
    }
  }
}