OmniSciDB  1dac507f6e
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Pages
com.mapd.utility.SQLImporter Class Reference
+ Collaboration diagram for com.mapd.utility.SQLImporter:

Static Public Member Functions

static void main (String[] args)
 

Protected Attributes

String session = null
 
MapD.Client client = null
 

Package Functions

void doWork (String[] args)
 
void executeQuery ()
 

Package Attributes

Db_vendor_types vendor_types = null
 

Static Package Attributes

static final Logger LOGGER = LoggerFactory.getLogger(SQLImporter.class)
 

Private Member Functions

void run_init (Connection conn)
 
void help (Options options)
 
void checkMapDTable (Connection otherdb_conn, ResultSetMetaData md) throws SQLException
 
void createMapDTable (Connection otherdb_conn, ResultSetMetaData metaData)
 
void createMapDConnection ()
 
List< TColumnType > getColumnInfo (String tName)
 
boolean tableExists (String tName)
 
void executeMapDCommand (String sql)
 
String getColType (int cType, int precision, int scale)
 
TColumn setupBinaryColumn (int i, ResultSetMetaData md, int bufferSize) throws SQLException
 
void setColValue (ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName) throws SQLException
 
void resetBinaryColumn (int i, ResultSetMetaData md, int bufferSize, TColumn col) throws SQLException
 

Private Attributes

CommandLine cmd = null
 
DateTimeUtils dateTimeUtils
 

Detailed Description

Definition at line 307 of file SQLImporter.java.

Member Function Documentation

void com.mapd.utility.SQLImporter.checkMapDTable ( Connection  otherdb_conn,
ResultSetMetaData  md 
) throws SQLException
inlineprivate

Definition at line 499 of file SQLImporter.java.

References com.mapd.utility.SQLImporter.cmd, com.mapd.utility.SQLImporter.createMapDConnection(), com.mapd.utility.SQLImporter.createMapDTable(), com.mapd.utility.SQLImporter.executeMapDCommand(), com.mapd.utility.SQLImporter.getColumnInfo(), Experimental.String, and com.mapd.utility.SQLImporter.tableExists().

Referenced by com.mapd.utility.SQLImporter.executeQuery().

500  {
502  String tName = cmd.getOptionValue("targetTable");
503 
504  if (tableExists(tName)) {
505  // check if we want to truncate
506  if (cmd.hasOption("truncate")) {
507  executeMapDCommand("Drop table " + tName);
508  createMapDTable(otherdb_conn, md);
509  } else {
510  List<TColumnType> columnInfo = getColumnInfo(tName);
511  // table exists, let's check it has the same number of columns
512 
513  if (md.getColumnCount() != columnInfo.size()) {
514  LOGGER.error("Table sizes do not match - OmniSci " + columnInfo.size()
515  + " versus Select " + md.getColumnCount());
516  exit(1);
517  }
518  // table exists, let's check it is the same layout - checking names will do for now
519  // Note: the odd start from 1 and the index reduced by one are due to SQL metadata
520  // beginning with 1 not 0
521  for (int colNum = 1; colNum <= columnInfo.size(); colNum++) {
522  if (!columnInfo.get(colNum - 1)
523  .col_name.equalsIgnoreCase(md.getColumnName(colNum))) {
524  LOGGER.error(
525  "OmniSci Table does not have matching column in same order for column number"
526  + colNum + " OmniSci column name is "
527  + columnInfo.get(colNum - 1).col_name + " versus Select "
528  + md.getColumnName(colNum));
529  exit(1);
530  }
531  }
532  }
533  } else {
534  createMapDTable(otherdb_conn, md);
535  }
536  }
List< TColumnType > getColumnInfo(String tName)
void createMapDTable(Connection otherdb_conn, ResultSetMetaData metaData)
boolean tableExists(String tName)
void executeMapDCommand(String sql)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void com.mapd.utility.SQLImporter.createMapDConnection ( )
inlineprivate

Definition at line 578 of file SQLImporter.java.

References com.mapd.utility.SQLImporter.client, com.mapd.utility.SQLImporter.cmd, com.mapd.utility.SQLImporter.session, and Experimental.String.

Referenced by com.mapd.utility.SQLImporter.checkMapDTable().

578  {
579  TTransport transport = null;
580  TProtocol protocol = new TBinaryProtocol(transport);
581  int port = Integer.valueOf(cmd.getOptionValue("port", "6274"));
582  String server = cmd.getOptionValue("server", "localhost");
583  try {
584  // Uses default certificate stores.
585  boolean load_trust_store = cmd.hasOption("https");
586  SockTransportProperties skT = null;
587  if (cmd.hasOption("https")) {
588  skT = new SockTransportProperties(load_trust_store & !cmd.hasOption("insecure"));
589  transport = skT.openHttpsClientTransport(server, port);
590  transport.open();
591  protocol = new TJSONProtocol(transport);
592  } else if (cmd.hasOption("http")) {
593  skT = new SockTransportProperties(load_trust_store);
594  transport = skT.openHttpClientTransport(server, port);
595  protocol = new TJSONProtocol(transport);
596  } else {
597  skT = new SockTransportProperties(load_trust_store);
598  transport = skT.openClientTransport(server, port);
599  transport.open();
600  protocol = new TBinaryProtocol(transport);
601  }
602 
603  client = new MapD.Client(protocol);
604  // This if will be useless until PKI signon
605  if (cmd.hasOption("user")) {
606  session = client.connect(cmd.getOptionValue("user", "admin"),
607  cmd.getOptionValue("passwd", "HyperInteractive"),
608  cmd.getOptionValue("database", "omnisci"));
609  }
610  LOGGER.debug("Connected session is " + session);
611 
612  } catch (TTransportException ex) {
613  LOGGER.error("Connection failed - " + ex.toString());
614  exit(1);
615  } catch (TMapDException ex) {
616  LOGGER.error("Connection failed - " + ex.toString());
617  exit(2);
618  } catch (TException ex) {
619  LOGGER.error("Connection failed - " + ex.toString());
620  exit(3);
621  } catch (Exception ex) {
622  LOGGER.error("General exception - " + ex.toString());
623  exit(4);
624  }
625  }

+ Here is the caller graph for this function:

void com.mapd.utility.SQLImporter.createMapDTable ( Connection  otherdb_conn,
ResultSetMetaData  metaData 
)
inlineprivate

Definition at line 538 of file SQLImporter.java.

References File_Namespace.append(), com.mapd.utility.SQLImporter.cmd, com.mapd.utility.SQLImporter.executeMapDCommand(), com.mapd.utility.SQLImporter.getColType(), and Integer.

Referenced by com.mapd.utility.SQLImporter.checkMapDTable().

538  {
539  StringBuilder sb = new StringBuilder();
540  sb.append("Create table ").append(cmd.getOptionValue("targetTable")).append("(");
541 
542  // Now iterate the metadata
543  try {
544  for (int i = 1; i <= metaData.getColumnCount(); i++) {
545  if (i > 1) {
546  sb.append(",");
547  }
548  LOGGER.debug("Column name is " + metaData.getColumnName(i));
549  LOGGER.debug("Column type is " + metaData.getColumnTypeName(i));
550  LOGGER.debug("Column type is " + metaData.getColumnType(i));
551 
552  sb.append(metaData.getColumnName(i)).append(" ");
553  int col_type = metaData.getColumnType(i);
554  if (col_type == java.sql.Types.OTHER) {
555  sb.append(vendor_types.find_gis_type(otherdb_conn, metaData, i));
556  } else {
557  sb.append(getColType(metaData.getColumnType(i),
558  metaData.getPrecision(i),
559  metaData.getScale(i)));
560  }
561  }
562  sb.append(")");
563 
564  if (Integer.valueOf(cmd.getOptionValue("fragmentSize", "0")) > 0) {
565  sb.append(" with (fragment_size = ");
566  sb.append(cmd.getOptionValue("fragmentSize", "0"));
567  sb.append(")");
568  }
569 
570  } catch (SQLException ex) {
571  LOGGER.error("Error processing the metadata - " + ex.toString());
572  exit(1);
573  }
574 
575  executeMapDCommand(sb.toString());
576  }
String getColType(int cType, int precision, int scale)
size_t append(FILE *f, const size_t size, int8_t *buf)
Appends the specified number of bytes to the end of the file f from buf.
Definition: File.cpp:136
void executeMapDCommand(String sql)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

void com.mapd.utility.SQLImporter.doWork ( String[]  args)
inlinepackage

Definition at line 323 of file SQLImporter.java.

References run_benchmark_import.args, com.mapd.utility.SQLImporter.cmd, and com.mapd.utility.SQLImporter.executeQuery().

323  {
324  // create Options object
325 
326  SQLImporter_args s_args = new SQLImporter_args();
327 
328  try {
329  cmd = s_args.parse(args);
330  } catch (ParseException ex) {
331  LOGGER.error(ex.getLocalizedMessage());
332  s_args.printHelpMessage();
333  exit(0);
334  }
335  executeQuery();
336  }

+ Here is the call graph for this function:

void com.mapd.utility.SQLImporter.executeMapDCommand ( String  sql)
inlineprivate

Definition at line 662 of file SQLImporter.java.

References com.mapd.utility.SQLImporter.session.

Referenced by com.mapd.utility.SQLImporter.checkMapDTable(), and com.mapd.utility.SQLImporter.createMapDTable().

662  {
663  LOGGER.info(" run comamnd :" + sql);
664 
665  try {
666  TQueryResult sqlResult = client.sql_execute(session, sql + ";", true, null, -1, -1);
667  } catch (TMapDException ex) {
668  LOGGER.error("SQL Execute failed - " + ex.toString());
669  exit(1);
670  } catch (TException ex) {
671  LOGGER.error("SQL Execute failed - " + ex.toString());
672  exit(1);
673  }
674  }

+ Here is the caller graph for this function:

void com.mapd.utility.SQLImporter.executeQuery ( )
inlinepackage

Definition at line 338 of file SQLImporter.java.

References com.mapd.utility.SQLImporter.checkMapDTable(), com.mapd.utility.SQLImporter.cmd, com.mapd.utility.SQLImporter.resetBinaryColumn(), com.mapd.utility.SQLImporter.run_init(), com.mapd.utility.SQLImporter.session, com.mapd.utility.SQLImporter.setColValue(), com.mapd.utility.SQLImporter.setupBinaryColumn(), and com.mapd.utility.SQLImporter.vendor_types.

Referenced by com.mapd.utility.SQLImporter.doWork().

338  {
339  Connection conn = null;
340  Statement stmt = null;
341 
342  long totalTime = 0;
343 
344  try {
345  // Open a connection
346  LOGGER.info("Connecting to database url :" + cmd.getOptionValue("jdbcConnect"));
347  conn = DriverManager.getConnection(cmd.getOptionValue("jdbcConnect"),
348  cmd.getOptionValue("sourceUser"),
349  cmd.getOptionValue("sourcePasswd"));
350  vendor_types = Db_vendor_types.Db_vendor_factory(cmd.getOptionValue("jdbcConnect"));
351  long startTime = System.currentTimeMillis();
352 
353  // run init file script on target DB if present
354  if (cmd.hasOption("initializeFile")) {
355  run_init(conn);
356  }
357 
358  // set autocommit off to allow postgres to not load all results
359  try {
360  conn.setAutoCommit(false);
361  } catch (SQLException se) {
362  LOGGER.warn(
363  "SQLException when attempting to setAutoCommit to false, jdbc driver probably doesnt support it. Error is "
364  + se.toString());
365  }
366 
367  // Execute a query
368  stmt = conn.createStatement();
369 
370  int bufferSize = Integer.valueOf(cmd.getOptionValue("bufferSize", "10000"));
371  // set the jdbc fetch buffer size to reduce the number of records being moved to
372  // java from postgres
373  stmt.setFetchSize(bufferSize);
374  long timer;
375 
376  ResultSet rs = stmt.executeQuery(cmd.getOptionValue("sqlStmt"));
377 
378  // check if table already exists and is compatible in OmniSci with the query
379  // metadata
380  ResultSetMetaData md = rs.getMetaData();
381  checkMapDTable(conn, md);
382 
383  timer = System.currentTimeMillis();
384 
385  long resultCount = 0;
386  int bufferCount = 0;
387  long total = 0;
388 
389  List<TColumn> cols = new ArrayList(md.getColumnCount());
390  for (int i = 1; i <= md.getColumnCount(); i++) {
391  TColumn col = setupBinaryColumn(i, md, bufferSize);
392  cols.add(col);
393  }
394 
395  // read data from old DB
396  while (rs.next()) {
397  for (int i = 1; i <= md.getColumnCount(); i++) {
398  setColValue(rs,
399  cols.get(i - 1),
400  md.getColumnType(i),
401  i,
402  md.getScale(i),
403  md.getColumnTypeName(i));
404  }
405  resultCount++;
406  bufferCount++;
407  if (bufferCount == bufferSize) {
408  bufferCount = 0;
409  // send the buffer to mapD
410  client.load_table_binary_columnar(
411  session, cmd.getOptionValue("targetTable"), cols); // old
412  // recreate columnar store for use
413  for (int i = 1; i <= md.getColumnCount(); i++) {
414  resetBinaryColumn(i, md, bufferSize, cols.get(i - 1));
415  }
416 
417  if (resultCount % 100000 == 0) {
418  LOGGER.info("Imported " + resultCount + " records");
419  }
420  }
421  }
422  if (bufferCount > 0) {
423  // send the LAST buffer to mapD
424  client.load_table_binary_columnar(
425  session, cmd.getOptionValue("targetTable"), cols);
426  bufferCount = 0;
427  }
428  LOGGER.info("result set count is " + resultCount + " read time is "
429  + (System.currentTimeMillis() - timer) + "ms");
430 
431  // Clean-up environment
432  rs.close();
433  stmt.close();
434 
435  totalTime = System.currentTimeMillis() - startTime;
436  conn.close();
437  } catch (SQLException se) {
438  LOGGER.error("SQLException - " + se.toString());
439  se.printStackTrace();
440  } catch (TMapDException ex) {
441  LOGGER.error("TMapDException - " + ex.toString());
442  ex.printStackTrace();
443  } catch (TException ex) {
444  LOGGER.error("TException failed - " + ex.toString());
445  ex.printStackTrace();
446  } finally {
447  // finally block used to close resources
448  try {
449  if (stmt != null) {
450  stmt.close();
451  }
452  } catch (SQLException se2) {
453  } // nothing we can do
454  try {
455  if (conn != null) {
456  conn.close();
457  }
458  } catch (SQLException se) {
459  LOGGER.error("SQlException in close - " + se.toString());
460  se.printStackTrace();
461  } // end finally try
462  } // end try
463  }
void resetBinaryColumn(int i, ResultSetMetaData md, int bufferSize, TColumn col)
void checkMapDTable(Connection otherdb_conn, ResultSetMetaData md)
void setColValue(ResultSet rs, TColumn col, int columnType, int colNum, int scale, String colTypeName)
TColumn setupBinaryColumn(int i, ResultSetMetaData md, int bufferSize)
void run_init(Connection conn)

+ Here is the call graph for this function:

+ Here is the caller graph for this function:

String com.mapd.utility.SQLImporter.getColType ( int  cType,
int  precision,
int  scale 
)
inlineprivate

Definition at line 676 of file SQLImporter.java.

Referenced by com.mapd.utility.SQLImporter.createMapDTable().

676  {
677  // Note - if cType is OTHER an earlier call will have been made
678  // to try and work out the db vendor's specific type.
679  if (precision > 19) {
680  precision = 19;
681  }
682  if (scale > 19) {
683  scale = 18;
684  }
685  switch (cType) {
686  case java.sql.Types.TINYINT:
687  return ("TINYINT");
688  case java.sql.Types.SMALLINT:
689  return ("SMALLINT");
690  case java.sql.Types.INTEGER:
691  return ("INTEGER");
692  case java.sql.Types.BIGINT:
693  return ("BIGINT");
694  case java.sql.Types.FLOAT:
695  return ("FLOAT");
696  case java.sql.Types.DECIMAL:
697  return ("DECIMAL(" + precision + "," + scale + ")");
698  case java.sql.Types.DOUBLE:
699  return ("DOUBLE");
700  case java.sql.Types.REAL:
701  return ("REAL");
702  case java.sql.Types.NUMERIC:
703  return ("NUMERIC(" + precision + "," + scale + ")");
704  case java.sql.Types.TIME:
705  return ("TIME");
706  case java.sql.Types.TIMESTAMP:
707  return ("TIMESTAMP");
708  case java.sql.Types.DATE:
709  return ("DATE");
710  case java.sql.Types.BOOLEAN:
711  case java.sql.Types
712  .BIT: // deal with postgres treating boolean as bit... this will bite me
713  return ("BOOLEAN");
714  case java.sql.Types.NVARCHAR:
715  case java.sql.Types.VARCHAR:
716  case java.sql.Types.NCHAR:
717  case java.sql.Types.CHAR:
718  case java.sql.Types.LONGVARCHAR:
719  case java.sql.Types.LONGNVARCHAR:
720  return ("TEXT ENCODING DICT");
721  default:
722  throw new AssertionError("Column type " + cType + " not Supported");
723  }
724  }

+ Here is the caller graph for this function:

List<TColumnType> com.mapd.utility.SQLImporter.getColumnInfo ( String  tName)
inlineprivate

Definition at line 627 of file SQLImporter.java.

References com.mapd.utility.SQLImporter.session.

Referenced by com.mapd.utility.SQLImporter.checkMapDTable().

627  {
628  LOGGER.debug("Getting columns for " + tName);
629  List<TColumnType> row_descriptor = null;
630  try {
631  TTableDetails table_details = client.get_table_details(session, tName);
632  row_descriptor = table_details.row_desc;
633  } catch (TMapDException ex) {
634  LOGGER.error("column check failed - " + ex.toString());
635  exit(3);
636  } catch (TException ex) {
637  LOGGER.error("column check failed - " + ex.toString());
638  exit(3);
639  }
640  return row_descriptor;
641  }

+ Here is the caller graph for this function:

void com.mapd.utility.SQLImporter.help ( Options  options)
inlineprivate

Definition at line 492 of file SQLImporter.java.

492  {
493  // automatically generate the help statement
494  HelpFormatter formatter = new HelpFormatter();
495  formatter.setOptionComparator(null); // get options in the order they are created
496  formatter.printHelp("SQLImporter", options);
497  }
static void com.mapd.utility.SQLImporter.main ( String[]  args)
inlinestatic

Definition at line 318 of file SQLImporter.java.

References run_benchmark_import.args.

318  {
319  SQLImporter sq = new SQLImporter();
320  sq.doWork(args);
321  }
void com.mapd.utility.SQLImporter.resetBinaryColumn ( int  i,
ResultSetMetaData  md,
int  bufferSize,
TColumn  col 
) throws SQLException
inlineprivate

Definition at line 897 of file SQLImporter.java.

Referenced by com.mapd.utility.SQLImporter.executeQuery().

898  {
899  col.nulls.clear();
900 
901  switch (md.getColumnType(i)) {
902  case java.sql.Types.TINYINT:
903  case java.sql.Types.SMALLINT:
904  case java.sql.Types.INTEGER:
905  case java.sql.Types.BIGINT:
906  case java.sql.Types.TIME:
907  case java.sql.Types.TIMESTAMP:
908  case java.sql.Types
909  .BIT: // deal with postgres treating boolean as bit... this will bite me
910  case java.sql.Types.BOOLEAN:
911  case java.sql.Types.DATE:
912  case java.sql.Types.DECIMAL:
913  case java.sql.Types.NUMERIC:
914  col.data.int_col.clear();
915  break;
916 
917  case java.sql.Types.FLOAT:
918  case java.sql.Types.DOUBLE:
919  case java.sql.Types.REAL:
920  col.data.real_col.clear();
921  break;
922 
923  case java.sql.Types.NVARCHAR:
924  case java.sql.Types.VARCHAR:
925  case java.sql.Types.NCHAR:
926  case java.sql.Types.CHAR:
927  case java.sql.Types.LONGVARCHAR:
928  case java.sql.Types.LONGNVARCHAR:
929  col.data.str_col.clear();
930  break;
931 
932  // Handle WKT for geo columns
933  case java.sql.Types.OTHER:
934  col.data.str_col.clear();
935  break;
936 
937  default:
938  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
939  }
940  }

+ Here is the caller graph for this function:

void com.mapd.utility.SQLImporter.run_init ( Connection  conn)
inlineprivate

Definition at line 465 of file SQLImporter.java.

References com.mapd.utility.SQLImporter.cmd, and Experimental.String.

Referenced by com.mapd.utility.SQLImporter.executeQuery().

465  {
466  // attempt to open file
467  String line = "";
468  try {
469  BufferedReader reader =
470  new BufferedReader(new FileReader(cmd.getOptionValue("initializeFile")));
471  Statement stmt = conn.createStatement();
472  while ((line = reader.readLine()) != null) {
473  if (line.isEmpty()) {
474  continue;
475  }
476  LOGGER.info("Running : " + line);
477  stmt.execute(line);
478  }
479  stmt.close();
480  reader.close();
481  } catch (IOException e) {
482  LOGGER.error("Exception occurred trying to read initialize file: "
483  + cmd.getOptionValue("initFile"));
484  exit(1);
485  } catch (SQLException e) {
486  LOGGER.error(
487  "Exception occurred trying to execute initialize file entry : " + line);
488  exit(1);
489  }
490  }

+ Here is the caller graph for this function:

void com.mapd.utility.SQLImporter.setColValue ( ResultSet  rs,
TColumn  col,
int  columnType,
int  colNum,
int  scale,
String  colTypeName 
) throws SQLException
inlineprivate

Definition at line 772 of file SQLImporter.java.

References Double, and Experimental.String.

Referenced by com.mapd.utility.SQLImporter.executeQuery().

777  {
778  switch (columnType) {
779  case java.sql.Types
780  .BIT: // deal with postgres treating boolean as bit... this will bite me
781  case java.sql.Types.BOOLEAN:
782  Boolean b = rs.getBoolean(colNum);
783  if (rs.wasNull()) {
784  col.nulls.add(Boolean.TRUE);
785  col.data.int_col.add(0L);
786  } else {
787  col.nulls.add(Boolean.FALSE);
788  col.data.int_col.add(b ? 1L : 0L);
789  }
790  break;
791 
792  case java.sql.Types.DECIMAL:
793  case java.sql.Types.NUMERIC:
794  BigDecimal bd = rs.getBigDecimal(colNum);
795  if (rs.wasNull()) {
796  col.nulls.add(Boolean.TRUE);
797  col.data.int_col.add(0L);
798  } else {
799  col.nulls.add(Boolean.FALSE);
800  col.data.int_col.add(bd.multiply(new BigDecimal(pow(10L, scale))).longValue());
801  }
802  break;
803 
804  case java.sql.Types.TINYINT:
805  case java.sql.Types.SMALLINT:
806  case java.sql.Types.INTEGER:
807  case java.sql.Types.BIGINT:
808  Long l = rs.getLong(colNum);
809  if (rs.wasNull()) {
810  col.nulls.add(Boolean.TRUE);
811  col.data.int_col.add(new Long(0));
812  } else {
813  col.nulls.add(Boolean.FALSE);
814  col.data.int_col.add(l);
815  }
816  break;
817 
818  case java.sql.Types.TIME:
819  Time t = rs.getTime(colNum);
820  if (rs.wasNull()) {
821  col.nulls.add(Boolean.TRUE);
822  col.data.int_col.add(0L);
823 
824  } else {
825  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(t.getTime()));
826  col.nulls.add(Boolean.FALSE);
827  }
828 
829  break;
830  case java.sql.Types.TIMESTAMP:
831  Timestamp ts = rs.getTimestamp(colNum);
832  if (rs.wasNull()) {
833  col.nulls.add(Boolean.TRUE);
834  col.data.int_col.add(0L);
835 
836  } else {
837  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(ts.getTime()));
838  col.nulls.add(Boolean.FALSE);
839  }
840 
841  break;
842  case java.sql.Types.DATE:
843  Date d = rs.getDate(colNum);
844  if (rs.wasNull()) {
845  col.nulls.add(Boolean.TRUE);
846  col.data.int_col.add(0L);
847 
848  } else {
849  col.data.int_col.add(dateTimeUtils.getSecondsFromMilliseconds(d.getTime()));
850  col.nulls.add(Boolean.FALSE);
851  }
852  break;
853  case java.sql.Types.FLOAT:
854  case java.sql.Types.DOUBLE:
855  case java.sql.Types.REAL:
856  Double db = rs.getDouble(colNum);
857  if (rs.wasNull()) {
858  col.nulls.add(Boolean.TRUE);
859  col.data.real_col.add(new Double(0));
860 
861  } else {
862  col.nulls.add(Boolean.FALSE);
863  col.data.real_col.add(db);
864  }
865  break;
866 
867  case java.sql.Types.NVARCHAR:
868  case java.sql.Types.VARCHAR:
869  case java.sql.Types.NCHAR:
870  case java.sql.Types.CHAR:
871  case java.sql.Types.LONGVARCHAR:
872  case java.sql.Types.LONGNVARCHAR:
873  String strVal = rs.getString(colNum);
874  if (rs.wasNull()) {
875  col.nulls.add(Boolean.TRUE);
876  col.data.str_col.add("");
877 
878  } else {
879  col.data.str_col.add(strVal);
880  col.nulls.add(Boolean.FALSE);
881  }
882  break;
883  case java.sql.Types.OTHER:
884  if (rs.wasNull()) {
885  col.nulls.add(Boolean.TRUE);
886  col.data.str_col.add("");
887  } else {
888  col.data.str_col.add(vendor_types.get_wkt(rs, colNum, colTypeName));
889  col.nulls.add(Boolean.FALSE);
890  }
891  break;
892  default:
893  throw new AssertionError("Column type " + columnType + " not Supported");
894  }
895  }

+ Here is the caller graph for this function:

TColumn com.mapd.utility.SQLImporter.setupBinaryColumn ( int  i,
ResultSetMetaData  md,
int  bufferSize 
) throws SQLException
inlineprivate

Definition at line 726 of file SQLImporter.java.

Referenced by com.mapd.utility.SQLImporter.executeQuery().

727  {
728  TColumn col = new TColumn();
729 
730  col.nulls = new ArrayList<Boolean>(bufferSize);
731 
732  col.data = new TColumnData();
733 
734  switch (md.getColumnType(i)) {
735  case java.sql.Types.TINYINT:
736  case java.sql.Types.SMALLINT:
737  case java.sql.Types.INTEGER:
738  case java.sql.Types.BIGINT:
739  case java.sql.Types.TIME:
740  case java.sql.Types.TIMESTAMP:
741  case java.sql.Types
742  .BIT: // deal with postgres treating boolean as bit... this will bite me
743  case java.sql.Types.BOOLEAN:
744  case java.sql.Types.DATE:
745  case java.sql.Types.DECIMAL:
746  case java.sql.Types.NUMERIC:
747  col.data.int_col = new ArrayList<Long>(bufferSize);
748  break;
749 
750  case java.sql.Types.FLOAT:
751  case java.sql.Types.DOUBLE:
752  case java.sql.Types.REAL:
753  col.data.real_col = new ArrayList<Double>(bufferSize);
754  break;
755 
756  case java.sql.Types.NVARCHAR:
757  case java.sql.Types.VARCHAR:
758  case java.sql.Types.NCHAR:
759  case java.sql.Types.CHAR:
760  case java.sql.Types.LONGVARCHAR:
761  case java.sql.Types.LONGNVARCHAR:
762  case java.sql.Types.OTHER:
763  col.data.str_col = new ArrayList<String>(bufferSize);
764  break;
765 
766  default:
767  throw new AssertionError("Column type " + md.getColumnType(i) + " not Supported");
768  }
769  return col;
770  }

+ Here is the caller graph for this function:

boolean com.mapd.utility.SQLImporter.tableExists ( String  tName)
inlineprivate

Definition at line 643 of file SQLImporter.java.

References com.mapd.utility.SQLImporter.session, and Experimental.String.

Referenced by com.mapd.utility.SQLImporter.checkMapDTable().

643  {
644  LOGGER.debug("Check for table " + tName);
645  try {
646  List<String> recv_get_tables = client.get_tables(session);
647  for (String s : recv_get_tables) {
648  if (s.equals(tName)) {
649  return true;
650  }
651  }
652  } catch (TMapDException ex) {
653  LOGGER.error("Table check failed - " + ex.toString());
654  exit(3);
655  } catch (TException ex) {
656  LOGGER.error("Table check failed - " + ex.toString());
657  exit(3);
658  }
659  return false;
660  }

+ Here is the caller graph for this function:

Member Data Documentation

MapD.Client com.mapd.utility.SQLImporter.client = null
protected

Definition at line 309 of file SQLImporter.java.

Referenced by com.mapd.utility.SQLImporter.createMapDConnection().

DateTimeUtils com.mapd.utility.SQLImporter.dateTimeUtils
private
Initial value:
= (milliseconds) -> {
return milliseconds / 1000;
}

Definition at line 312 of file SQLImporter.java.

final Logger com.mapd.utility.SQLImporter.LOGGER = LoggerFactory.getLogger(SQLImporter.class)
staticpackage

Definition at line 311 of file SQLImporter.java.

Db_vendor_types com.mapd.utility.SQLImporter.vendor_types = null
package

Definition at line 316 of file SQLImporter.java.

Referenced by com.mapd.utility.SQLImporter.executeQuery().


The documentation for this class was generated from the following file: