OmniSciDB  085a039ca4
 All Classes Namespaces Files Functions Variables Typedefs Enumerations Enumerator Friends Macros Groups Pages
MetaConnect.java
Go to the documentation of this file.
1 /*
2  * Copyright 2022 HEAVY.AI, Inc.
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  * http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 package com.mapd.metadata;
17 
18 import com.google.common.collect.ImmutableList;
19 import com.google.gson.Gson;
20 import com.google.gson.JsonArray;
21 import com.google.gson.JsonElement;
22 import com.google.gson.JsonObject;
28 
29 import org.apache.calcite.schema.Table;
30 import org.apache.thrift.TException;
31 import org.apache.thrift.protocol.TBinaryProtocol;
32 import org.apache.thrift.protocol.TProtocol;
33 import org.apache.thrift.transport.TServerSocket;
34 import org.apache.thrift.transport.TSocket;
35 import org.apache.thrift.transport.TTransport;
36 import org.apache.thrift.transport.TTransportException;
37 import org.slf4j.Logger;
38 import org.slf4j.LoggerFactory;
39 
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
55 
56 import ai.heavy.thrift.server.Heavy;
57 import ai.heavy.thrift.server.TColumnType;
58 import ai.heavy.thrift.server.TDBException;
59 import ai.heavy.thrift.server.TDBInfo;
60 import ai.heavy.thrift.server.TDatumType;
61 import ai.heavy.thrift.server.TEncodingType;
62 import ai.heavy.thrift.server.TTableDetails;
63 import ai.heavy.thrift.server.TTypeInfo;
64 
69 public class MetaConnect {
70  final static Logger HEAVYDBLOGGER = LoggerFactory.getLogger(MetaConnect.class);
71  private final String dataDir;
72  private final String default_db;
73  private final HeavyDBUser currentUser;
74  private final int dbPort;
75  private Connection catConn;
76  private final HeavyDBParser parser;
77 
78  private static final int KBOOLEAN = 1;
79  private static final int KCHAR = 2;
80  private static final int KVARCHAR = 3;
81  private static final int KNUMERIC = 4;
82  private static final int KDECIMAL = 5;
83  private static final int KINT = 6;
84  private static final int KSMALLINT = 7;
85  private static final int KFLOAT = 8;
86  private static final int KDOUBLE = 9;
87  private static final int KTIME = 10;
88  private static final int KTIMESTAMP = 11;
89  private static final int KBIGINT = 12;
90  private static final int KTEXT = 13;
91  private static final int KDATE = 14;
92  private static final int KARRAY = 15;
93  private static final int KINTERVAL_DAY_TIME = 16;
94  private static final int KINTERVAL_YEAR_MONTH = 17;
95  private static final int KPOINT = 18;
96  private static final int KLINESTRING = 19;
97  private static final int KPOLYGON = 20;
98  private static final int KMULTIPOLYGON = 21;
99  private static final int KTINYINT = 22;
100 
101  private static final String CATALOG_DIR_NAME = "catalogs";
102  private static volatile Map<String, Set<String>> DATABASE_TO_TABLES =
103  new ConcurrentHashMap<>();
104  private static volatile Map<List<String>, Table> DB_TABLE_DETAILS =
105  new ConcurrentHashMap<>();
107 
  /**
   * Primary constructor: binds this MetaConnect to a server port, data
   * directory, user, and default database name. When {@code db} is null the
   * user's own default database is used.
   *
   * NOTE(review): this generated listing has gaps — source lines 111-112 are
   * missing (per the member index the full signature also takes
   * {@code HeavyDBParser parser} and {@code SockTransportProperties skT}),
   * and source line 135 (inside the populate block below, presumably a
   * populateDatabases() call) is missing. Confirm against the original file.
   */
  public MetaConnect(int dbPort,
          String dataDir,
          HeavyDBUser currentHeavyDBUser,
          String db) {
    this.dataDir = dataDir;
    // explicit db argument wins; otherwise fall back to the user's default DB
    if (db != null) {
      this.default_db = db;
    } else {
      if (currentHeavyDBUser != null) {
        this.default_db = currentHeavyDBUser.getDB();
      } else {
        this.default_db = null;
      }
    }
    this.currentUser = currentHeavyDBUser;
    this.dbPort = dbPort;
    this.parser = parser;
    this.sock_transport_properties = skT;

    // check to see if we have a populated DATABASE_TO_TABLES structure
    // first time in we need to make sure this gets populated
    // It is OK to use a MetaConnect without a user
    // but it should not attempt to populate the DB
    if (currentUser != null && DATABASE_TO_TABLES.size() == 0) {
      // get all databases
    }
  }
138 
  /**
   * Convenience constructor: delegates to the primary constructor with a
   * null database so the user's default database is used.
   *
   * NOTE(review): source lines 142-143 (the {@code parser} and {@code skT}
   * parameters, per the member index) are missing from this generated
   * listing; confirm against the original file.
   */
  public MetaConnect(int dbPort,
          String dataDir,
          HeavyDBUser currentHeavyDBUser,
    this(dbPort, dataDir, currentHeavyDBUser, parser, skT, null);
  }
146 
147  public List<String> getDatabases() {
148  List<String> dbList = new ArrayList<String>(DATABASE_TO_TABLES.size());
149  for (String db : DATABASE_TO_TABLES.keySet()) {
150  dbList.add(db);
151  }
152  return dbList;
153  }
154 
155  private void connectToCatalog(String catalog) {
156  try {
157  // try {
158  Class.forName("org.sqlite.JDBC");
159  } catch (ClassNotFoundException ex) {
160  String err = "Could not find class for metadata connection; DB: '" + catalog
161  + "' data dir '" + dataDir + "', error was " + ex.getMessage();
162  HEAVYDBLOGGER.error(err);
163  throw new RuntimeException(err);
164  }
165  String connectURL = "jdbc:sqlite:" + dataDir + "/" + CATALOG_DIR_NAME + "/" + catalog;
166  try {
167  catConn = DriverManager.getConnection(connectURL);
168  } catch (SQLException ex) {
169  String err = "Could not establish a connection for metadata; DB: '" + catalog
170  + "' data dir '" + dataDir + "', error was " + ex.getMessage();
171  HEAVYDBLOGGER.error(err);
172  throw new RuntimeException(err);
173  }
174  HEAVYDBLOGGER.debug("Opened database successfully");
175  }
176 
177  private void disconnectFromCatalog() {
178  try {
179  catConn.close();
180  } catch (SQLException ex) {
181  String err = "Could not disconnect from metadata "
182  + " data dir '" + dataDir + "', error was " + ex.getMessage();
183  HEAVYDBLOGGER.error(err);
184  throw new RuntimeException(err);
185  }
186  }
187 
  /**
   * Opens the catalog connection for this MetaConnect's default database.
   * NOTE(review): the body line (source line 189) is missing from this
   * generated listing — presumably {@code connectToCatalog(default_db);}
   * given how callers pair this with disconnectFromCatalog(); confirm
   * against the original file.
   */
  private void connectToDBCatalog() {
  }
191 
192  public Table getTable(String tableName) {
193  List<String> dbTable =
194  ImmutableList.of(default_db.toUpperCase(), tableName.toUpperCase());
195  Table cTable = DB_TABLE_DETAILS.get(dbTable);
196  if (cTable != null) {
197  HEAVYDBLOGGER.debug("Metaconnect DB " + default_db + " get table " + tableName
198  + " details " + cTable);
199  return cTable;
200  }
201 
202  TTableDetails td = get_table_details(tableName);
203 
204  if (td.getView_sql() == null || td.getView_sql().isEmpty()) {
205  HEAVYDBLOGGER.debug("Processing a table");
206  Table rTable = new HeavyDBTable(td);
207  DB_TABLE_DETAILS.putIfAbsent(dbTable, rTable);
208  HEAVYDBLOGGER.debug("Metaconnect DB " + default_db + " get table " + tableName
209  + " details " + rTable + " Not in buffer");
210  return rTable;
211  } else {
212  HEAVYDBLOGGER.debug("Processing a view");
213  Table rTable = new HeavyDBView(getViewSql(tableName), td, parser);
214  DB_TABLE_DETAILS.putIfAbsent(dbTable, rTable);
215  HEAVYDBLOGGER.debug("Metaconnect DB " + default_db + " get view " + tableName
216  + " details " + rTable + " Not in buffer");
217  return rTable;
218  }
219  }
220 
  /**
   * Returns the set of table names for the default database. Serves from
   * the static cache when non-empty; otherwise loads the list either from
   * the SQLite catalog (dbPort == -1) or over Thrift from the local server,
   * caches it, and returns it.
   *
   * NOTE(review): source lines 230 and 232 (surrounding the getTables_SQL()
   * call) are missing from this generated listing — presumably
   * connectToDBCatalog() / disconnectFromCatalog(); confirm against the
   * original file.
   *
   * @throws RuntimeException on any Thrift transport/server error
   */
  public Set<String> getTables() {
    Set<String> mSet = DATABASE_TO_TABLES.get(default_db.toUpperCase());
    if (mSet != null && mSet.size() > 0) {
      HEAVYDBLOGGER.debug("Metaconnect DB getTables " + default_db + " tables " + mSet);
      return mSet;
    }

    if (dbPort == -1) {
      // use sql
      Set<String> ts = getTables_SQL();
      DATABASE_TO_TABLES.put(default_db.toUpperCase(), ts);
      HEAVYDBLOGGER.debug(
              "Metaconnect DB getTables " + default_db + " tables " + ts + " from catDB");
      return ts;
    }
    // use thrift direct to local server
    try {
      TProtocol protocol = null;
      TTransport transport =
              sock_transport_properties.openClientTransport("localhost", dbPort);
      if (!transport.isOpen()) transport.open();
      protocol = new TBinaryProtocol(transport);

      Heavy.Client client = new Heavy.Client(protocol);
      List<String> tablesList =
              client.get_tables_for_database(currentUser.getSession(), default_db);
      Set<String> ts = new HashSet<String>(tablesList.size());
      for (String tableName : tablesList) {
        ts.add(tableName);
      }

      // NOTE(review): transport is not closed on the exception paths below
      transport.close();
      DATABASE_TO_TABLES.put(default_db.toUpperCase(), ts);
      HEAVYDBLOGGER.debug("Metaconnect DB getTables " + default_db + " tables " + ts
              + " from server");
      return ts;

    } catch (TTransportException ex) {
      HEAVYDBLOGGER.error("TTransportException on port [" + dbPort + "]");
      HEAVYDBLOGGER.error(ex.toString());
      throw new RuntimeException(ex.toString());
    } catch (TDBException ex) {
      HEAVYDBLOGGER.error(ex.getError_msg());
      throw new RuntimeException(ex.getError_msg());
    } catch (TException ex) {
      HEAVYDBLOGGER.error(ex.toString());
      throw new RuntimeException(ex.toString());
    }
  }
272 
  /**
   * Loads table names for the default database directly from the SQLite
   * catalog (mapd_tables), then merges in any temporary tables recorded in
   * the per-database "&lt;db&gt;_temp_tables.json" file. Assumes the catalog
   * connection ({@code catConn}) is already open.
   *
   * NOTE(review): source lines 274 and 297 are missing from this generated
   * listing. Also note, left untouched in this documentation-only pass:
   * {@code sqlText} is unused, and {@code read(data)} below ignores its
   * return value (may read fewer bytes than the file length).
   *
   * @return set of table names (empty-file / missing-file cases return the
   *         catalog tables only)
   * @throws RuntimeException on SQL errors or a malformed temp-tables file
   */
  private Set<String> getTables_SQL() {
    Set<String> tableSet = new HashSet<String>();
    Statement stmt = null;
    ResultSet rs = null;
    String sqlText = "";
    try {
      stmt = catConn.createStatement();

      // get the tables
      rs = stmt.executeQuery("SELECT name FROM mapd_tables ");
      while (rs.next()) {
        tableSet.add(rs.getString("name"));
        /*--*/
        HEAVYDBLOGGER.debug("Object name = " + rs.getString("name"));
      }
      rs.close();
      stmt.close();

    } catch (Exception e) {
      String err = "error trying to get all the tables, error was " + e.getMessage();
      HEAVYDBLOGGER.error(err);
      throw new RuntimeException(err);
    }

    try {
      // open temp table json file
      final String filePath =
              dataDir + "/" + CATALOG_DIR_NAME + "/" + default_db + "_temp_tables.json";
      HEAVYDBLOGGER.debug("Opening temp table file at " + filePath);
      String tempTablesJsonStr;
      try {
        File tempTablesFile = new File(filePath);
        FileInputStream tempTablesStream = new FileInputStream(tempTablesFile);
        byte[] data = new byte[(int) tempTablesFile.length()];
        tempTablesStream.read(data);
        tempTablesStream.close();

        tempTablesJsonStr = new String(data, "UTF-8");
      } catch (java.io.FileNotFoundException e) {
        // no temp-tables file simply means there are no temporary tables
        return tableSet;
      }

      Gson gson = new Gson();
      JsonObject fileParentObject = gson.fromJson(tempTablesJsonStr, JsonObject.class);
      // top-level JSON keys are the temporary table names
      for (Entry<String, JsonElement> member : fileParentObject.entrySet()) {
        String tableName = member.getKey();
        tableSet.add(tableName);
        /*--*/
        HEAVYDBLOGGER.debug("Temp table object name = " + tableName);
      }

    } catch (Exception e) {
      String err = "error trying to load temporary tables from json file, error was "
              + e.getMessage();
      HEAVYDBLOGGER.error(err);
      throw new RuntimeException(err);
    }

    return tableSet;
  }
335 
  /**
   * Fetches table details either from the SQLite catalog (dbPort == -1) or
   * over Thrift from the local server.
   *
   * NOTE(review): source lines 339 and 341 (surrounding the
   * get_table_detail_SQL call) are missing from this generated listing —
   * presumably connectToDBCatalog() / disconnectFromCatalog(); confirm
   * against the original file.
   *
   * @throws RuntimeException on any Thrift transport/server error
   */
  public TTableDetails get_table_details(String tableName) {
    if (dbPort == -1) {
      // use sql
      TTableDetails td = get_table_detail_SQL(tableName);
      return td;
    }
    // use thrift direct to local server
    try {
      TProtocol protocol = null;

      TTransport transport =
              sock_transport_properties.openClientTransport("localhost", dbPort);
      if (!transport.isOpen()) transport.open();
      protocol = new TBinaryProtocol(transport);

      Heavy.Client client = new Heavy.Client(protocol);
      TTableDetails td = client.get_internal_table_details_for_database(
              currentUser.getSession(), tableName, default_db);
      // NOTE(review): transport is not closed on the exception paths below
      transport.close();

      return td;

    } catch (TTransportException ex) {
      HEAVYDBLOGGER.error(ex.toString());
      throw new RuntimeException(ex.toString());
    } catch (TDBException ex) {
      HEAVYDBLOGGER.error(ex.getError_msg());
      throw new RuntimeException(ex.getError_msg());
    } catch (TException ex) {
      HEAVYDBLOGGER.error(ex.toString());
      throw new RuntimeException(ex.toString());
    }
  }
371 
372  public static final int get_physical_cols(int type) {
373  switch (type) {
374  case KPOINT:
375  return 1; // coords
376  case KLINESTRING:
377  return 2; // coords, bounds
378  case KPOLYGON:
379  return 4; // coords, ring_sizes, bounds, render_group
380  case KMULTIPOLYGON:
381  return 5; // coords, ring_sizes, poly_rings, bounds, render_group
382  default:
383  break;
384  }
385  return 0;
386  }
387 
388  public static final boolean is_geometry(int type) {
389  return type == KPOINT || type == KLINESTRING || type == KPOLYGON
390  || type == KMULTIPOLYGON;
391  }
392 
  /**
   * Builds TTableDetails for a table by reading mapd_columns from the SQLite
   * catalog. When the table id cannot be found, falls back to the temp-table
   * JSON file (marking the result temporary). Assumes the catalog connection
   * ({@code catConn}) is already open.
   *
   * @param tableName table whose column metadata to read
   * @return row descriptor for the table (plus view SQL when it is a view)
   * @throws RuntimeException when the table exists in neither the catalog
   *         nor the temp-tables file, or on any SQL error
   */
  private TTableDetails get_table_detail_SQL(String tableName) {
    TTableDetails td = new TTableDetails();
    td.getRow_descIterator(); // called for its side effect of initializing row_desc
    int id = getTableId(tableName);
    if (id == -1) {
      try {
        // need to mark it as temporary table
        TTableDetails tempTableTd = get_table_detail_JSON(tableName);
        tempTableTd.is_temporary = true;
        return tempTableTd;
      } catch (Exception e) {
        String err =
                "Table '" + tableName + "' does not exist for DB '" + default_db + "'";
        HEAVYDBLOGGER.error(err);
        throw new RuntimeException(err);
      }
    }

    // read data from table
    Statement stmt = null;
    ResultSet rs = null;
    try {
      stmt = catConn.createStatement();
      HEAVYDBLOGGER.debug("table id is " + id);
      HEAVYDBLOGGER.debug("table name is " + tableName);
      // id is an int from the catalog itself, so %d interpolation is safe here
      String query = String.format(
              "SELECT * FROM mapd_columns where tableid = %d and not is_deletedcol order by columnid;",
              id);
      HEAVYDBLOGGER.debug(query);
      rs = stmt.executeQuery(query);
      int skip_physical_cols = 0;
      while (rs.next()) {
        String colName = rs.getString("name");
        HEAVYDBLOGGER.debug("name = " + colName);
        int colType = rs.getInt("coltype");
        HEAVYDBLOGGER.debug("coltype = " + colType);
        int colSubType = rs.getInt("colsubtype");
        HEAVYDBLOGGER.debug("colsubtype = " + colSubType);
        int colDim = rs.getInt("coldim");
        HEAVYDBLOGGER.debug("coldim = " + colDim);
        int colScale = rs.getInt("colscale");
        HEAVYDBLOGGER.debug("colscale = " + colScale);
        boolean isNotNull = rs.getBoolean("is_notnull");
        HEAVYDBLOGGER.debug("is_notnull = " + isNotNull);
        boolean isSystemCol = rs.getBoolean("is_systemcol");
        HEAVYDBLOGGER.debug("is_systemcol = " + isSystemCol);
        boolean isVirtualCol = rs.getBoolean("is_virtualcol");
        HEAVYDBLOGGER.debug("is_vitrualcol = " + isVirtualCol);
        HEAVYDBLOGGER.debug("");
        TColumnType tct = new TColumnType();
        TTypeInfo tti = new TTypeInfo();
        TDatumType tdt;

        // array columns carry their element type in colsubtype
        if (colType == KARRAY) {
          tti.is_array = true;
          tdt = typeToThrift(colSubType);
        } else {
          tti.is_array = false;
          tdt = typeToThrift(colType);
        }

        tti.nullable = !isNotNull;
        tti.encoding = TEncodingType.NONE;
        tti.type = tdt;
        tti.scale = colScale;
        tti.precision = colDim;

        tct.col_name = colName;
        tct.col_type = tti;
        tct.is_system = isSystemCol;

        // Geometry columns are followed by hidden physical companion columns;
        // emit the geometry column itself, then skip its companions. The
        // check-then-post-decrement ordering below is deliberate — do not
        // reorder these two lines.
        if (skip_physical_cols <= 0) skip_physical_cols = get_physical_cols(colType);
        if (is_geometry(colType) || skip_physical_cols-- <= 0) td.addToRow_desc(tct);
      }
    } catch (Exception e) {
      String err = "error trying to read from mapd_columns, error was " + e.getMessage();
      HEAVYDBLOGGER.error(err);
      throw new RuntimeException(err);
    } finally {
      // NOTE(review): throwing from finally here can mask the original
      // exception from the try block; left as-is in this doc-only pass
      if (rs != null) {
        try {
          rs.close();
        } catch (SQLException ex) {
          String err = "Could not close resultset, error was " + ex.getMessage();
          HEAVYDBLOGGER.error(err);
          throw new RuntimeException(err);
        }
      }
      if (stmt != null) {
        try {
          stmt.close();
        } catch (SQLException ex) {
          String err = "Could not close stmt, error was " + ex.getMessage();
          HEAVYDBLOGGER.error(err);
          throw new RuntimeException(err);
        }
      }
    }
    if (isView(tableName)) {
      td.setView_sqlIsSet(true);
      td.setView_sql(getViewSqlViaSql(id));
    }
    return td;
  }
497 
498  private TTableDetails get_table_detail_JSON(String tableName)
499  throws IOException, RuntimeException {
500  TTableDetails td = new TTableDetails();
501  td.getRow_descIterator();
502 
503  // open table json file
504  final String filePath =
505  dataDir + "/" + CATALOG_DIR_NAME + "/" + default_db + "_temp_tables.json";
506  HEAVYDBLOGGER.debug("Opening temp table file at " + filePath);
507 
508  String tempTablesJsonStr;
509  try {
510  File tempTablesFile = new File(filePath);
511  FileInputStream tempTablesStream = new FileInputStream(tempTablesFile);
512  byte[] data = new byte[(int) tempTablesFile.length()];
513  tempTablesStream.read(data);
514  tempTablesStream.close();
515 
516  tempTablesJsonStr = new String(data, "UTF-8");
517  } catch (java.io.FileNotFoundException e) {
518  throw new RuntimeException("Failed to read temporary tables file.");
519  }
520 
521  Gson gson = new Gson();
522  JsonObject fileParentObject = gson.fromJson(tempTablesJsonStr, JsonObject.class);
523  if (fileParentObject == null) {
524  throw new IOException("Malformed temporary tables file.");
525  }
526 
527  JsonObject tableObject = fileParentObject.getAsJsonObject(tableName);
528  if (tableObject == null) {
529  throw new RuntimeException(
530  "Failed to find table " + tableName + " in temporary tables file.");
531  }
532 
533  String jsonTableName = tableObject.get("name").getAsString();
534  assert (tableName == jsonTableName);
535  int id = tableObject.get("id").getAsInt();
536  HEAVYDBLOGGER.debug("table id is " + id);
537  HEAVYDBLOGGER.debug("table name is " + tableName);
538 
539  JsonArray jsonColumns = tableObject.getAsJsonArray("columns");
540  assert (jsonColumns != null);
541 
542  int skip_physical_cols = 0;
543  for (JsonElement columnElement : jsonColumns) {
544  JsonObject columnObject = columnElement.getAsJsonObject();
545 
546  String colName = columnObject.get("name").getAsString();
547  HEAVYDBLOGGER.debug("name = " + colName);
548  int colType = columnObject.get("coltype").getAsInt();
549  HEAVYDBLOGGER.debug("coltype = " + colType);
550  int colSubType = columnObject.get("colsubtype").getAsInt();
551  HEAVYDBLOGGER.debug("colsubtype = " + colSubType);
552  int colDim = columnObject.get("coldim").getAsInt();
553  HEAVYDBLOGGER.debug("coldim = " + colDim);
554  int colScale = columnObject.get("colscale").getAsInt();
555  HEAVYDBLOGGER.debug("colscale = " + colScale);
556  boolean isNotNull = columnObject.get("is_notnull").getAsBoolean();
557  HEAVYDBLOGGER.debug("is_notnull = " + isNotNull);
558  boolean isSystemCol = columnObject.get("is_systemcol").getAsBoolean();
559  HEAVYDBLOGGER.debug("is_systemcol = " + isSystemCol);
560  boolean isVirtualCol = columnObject.get("is_virtualcol").getAsBoolean();
561  HEAVYDBLOGGER.debug("is_vitrualcol = " + isVirtualCol);
562  boolean isDeletedCol = columnObject.get("is_deletedcol").getAsBoolean();
563  HEAVYDBLOGGER.debug("is_deletedcol = " + isDeletedCol);
564  HEAVYDBLOGGER.debug("");
565 
566  if (isDeletedCol) {
567  HEAVYDBLOGGER.debug("Skipping delete column.");
568  continue;
569  }
570 
571  TColumnType tct = new TColumnType();
572  TTypeInfo tti = new TTypeInfo();
573  TDatumType tdt;
574 
575  if (colType == KARRAY) {
576  tti.is_array = true;
577  tdt = typeToThrift(colSubType);
578  } else {
579  tti.is_array = false;
580  tdt = typeToThrift(colType);
581  }
582 
583  tti.nullable = !isNotNull;
584  tti.encoding = TEncodingType.NONE;
585  tti.type = tdt;
586  tti.scale = colScale;
587  tti.precision = colDim;
588 
589  tct.col_name = colName;
590  tct.col_type = tti;
591  tct.is_system = isSystemCol;
592 
593  if (skip_physical_cols <= 0) skip_physical_cols = get_physical_cols(colType);
594  if (is_geometry(colType) || skip_physical_cols-- <= 0) td.addToRow_desc(tct);
595  }
596 
597  return td;
598  }
599 
600  private int getTableId(String tableName) {
601  Statement stmt = null;
602  ResultSet rs = null;
603  int tableId = -1;
604  try {
605  stmt = catConn.createStatement();
606  rs = stmt.executeQuery(String.format(
607  "SELECT tableid FROM mapd_tables where name = '%s' COLLATE NOCASE;",
608  tableName));
609  while (rs.next()) {
610  tableId = rs.getInt("tableid");
611  HEAVYDBLOGGER.debug("tableId = " + tableId);
612  HEAVYDBLOGGER.debug("");
613  }
614  rs.close();
615  stmt.close();
616  } catch (Exception e) {
617  String err = "Error trying to read from metadata table mapd_tables;DB: "
618  + default_db + " data dir " + dataDir + ", error was " + e.getMessage();
619  HEAVYDBLOGGER.error(err);
620  throw new RuntimeException(err);
621  } finally {
622  if (rs != null) {
623  try {
624  rs.close();
625  } catch (SQLException ex) {
626  String err = "Could not close resultset, error was " + ex.getMessage();
627  HEAVYDBLOGGER.error(err);
628  throw new RuntimeException(err);
629  }
630  }
631  if (stmt != null) {
632  try {
633  stmt.close();
634  } catch (SQLException ex) {
635  String err = "Could not close stmt, error was " + ex.getMessage();
636  HEAVYDBLOGGER.error(err);
637  throw new RuntimeException(err);
638  }
639  }
640  }
641  return (tableId);
642  }
643 
644  private boolean isView(String tableName) {
645  Statement stmt;
646  ResultSet rs;
647  int viewFlag = 0;
648  try {
649  stmt = catConn.createStatement();
650  rs = stmt.executeQuery(String.format(
651  "SELECT isview FROM mapd_tables where name = '%s' COLLATE NOCASE;",
652  tableName));
653  while (rs.next()) {
654  viewFlag = rs.getInt("isview");
655  HEAVYDBLOGGER.debug("viewFlag = " + viewFlag);
656  HEAVYDBLOGGER.debug("");
657  }
658  rs.close();
659  stmt.close();
660  } catch (Exception e) {
661  String err = "error trying to read from mapd_views, error was " + e.getMessage();
662  HEAVYDBLOGGER.error(err);
663  throw new RuntimeException(err);
664  }
665  return (viewFlag == 1);
666  }
667 
  /**
   * Fetches the view definition SQL for the named view, either via the
   * SQLite catalog (dbPort == -1) or over Thrift from the local server,
   * stripping any trailing semicolon SQLite stores.
   *
   * NOTE(review): source lines 672 and 674 (surrounding the
   * getViewSqlViaSql call) are missing from this generated listing —
   * presumably connectToDBCatalog() / disconnectFromCatalog(); confirm
   * against the original file.
   *
   * @throws RuntimeException on any Thrift transport/server error
   */
  private String getViewSql(String tableName) {
    String sqlText;
    if (dbPort == -1) {
      // use sql
      sqlText = getViewSqlViaSql(getTableId(tableName));
    } else {
      // use thrift direct to local server
      try {
        TProtocol protocol = null;

        TTransport transport =
                sock_transport_properties.openClientTransport("localhost", dbPort);
        if (!transport.isOpen()) transport.open();
        protocol = new TBinaryProtocol(transport);

        Heavy.Client client = new Heavy.Client(protocol);
        TTableDetails td = client.get_table_details_for_database(
                currentUser.getSession(), tableName, default_db);
        // NOTE(review): transport is not closed on the exception paths below
        transport.close();

        sqlText = td.getView_sql();

      } catch (TTransportException ex) {
        HEAVYDBLOGGER.error(ex.toString());
        throw new RuntimeException(ex.toString());
      } catch (TDBException ex) {
        HEAVYDBLOGGER.error(ex.getError_msg());
        throw new RuntimeException(ex.getError_msg());
      } catch (TException ex) {
        HEAVYDBLOGGER.error(ex.toString());
        throw new RuntimeException(ex.toString());
      }
    }
    /* return string without the sqlite's trailing semicolon */
    if (sqlText.charAt(sqlText.length() - 1) == ';') {
      return (sqlText.substring(0, sqlText.length() - 1));
    } else {
      return (sqlText);
    }
  }
710 
711  // we assume there is already a DB connection here
712  private String getViewSqlViaSql(int tableId) {
713  Statement stmt;
714  ResultSet rs;
715  String sqlText = "";
716  try {
717  stmt = catConn.createStatement();
718  rs = stmt.executeQuery(String.format(
719  "SELECT sql FROM mapd_views where tableid = '%s' COLLATE NOCASE;",
720  tableId));
721  while (rs.next()) {
722  sqlText = rs.getString("sql");
723  HEAVYDBLOGGER.debug("View definition = " + sqlText);
724  HEAVYDBLOGGER.debug("");
725  }
726  rs.close();
727  stmt.close();
728  } catch (Exception e) {
729  String err = "error trying to read from mapd_views, error was " + e.getMessage();
730  HEAVYDBLOGGER.error(err);
731  throw new RuntimeException(err);
732  }
733  if (sqlText == null || sqlText.length() == 0) {
734  String err = "No view text found";
735  HEAVYDBLOGGER.error(err);
736  throw new RuntimeException(err);
737  }
738  return sqlText;
739  }
740 
  /**
   * Maps a catalog numeric type code to the corresponding Thrift
   * TDatumType; returns null for unrecognized codes.
   *
   * NOTE(review): source line 772 is missing from this generated listing —
   * given the KINTERVAL_YEAR_MONTH constant it is almost certainly
   * "case KINTERVAL_YEAR_MONTH:" preceding the INTERVAL_YEAR_MONTH return
   * (as shown, that return is unreachable and the method would not
   * compile). Confirm against the original file; code left byte-identical
   * in this documentation-only pass.
   */
  private TDatumType typeToThrift(int type) {
    switch (type) {
      case KBOOLEAN:
        return TDatumType.BOOL;
      case KTINYINT:
        return TDatumType.TINYINT;
      case KSMALLINT:
        return TDatumType.SMALLINT;
      case KINT:
        return TDatumType.INT;
      case KBIGINT:
        return TDatumType.BIGINT;
      case KFLOAT:
        return TDatumType.FLOAT;
      case KNUMERIC:
      case KDECIMAL:
        return TDatumType.DECIMAL;
      case KDOUBLE:
        return TDatumType.DOUBLE;
      case KTEXT:
      case KVARCHAR:
      case KCHAR:
        return TDatumType.STR;
      case KTIME:
        return TDatumType.TIME;
      case KTIMESTAMP:
        return TDatumType.TIMESTAMP;
      case KDATE:
        return TDatumType.DATE;
      case KINTERVAL_DAY_TIME:
        return TDatumType.INTERVAL_DAY_TIME;
        return TDatumType.INTERVAL_YEAR_MONTH;
      case KPOINT:
        return TDatumType.POINT;
      case KLINESTRING:
        return TDatumType.LINESTRING;
      case KPOLYGON:
        return TDatumType.POLYGON;
      case KMULTIPOLYGON:
        return TDatumType.MULTIPOLYGON;
      default:
        return null;
    }
  }
786 
  /**
   * Populates the static DATABASE_TO_TABLES map with an (initially empty)
   * entry per database, either from the SQLite system catalog (dbPort == -1)
   * or over Thrift from the local server. A dbPort of 0 is a no-op (FSI
   * test configuration).
   *
   * NOTE(review): source line 803 is missing from this generated listing —
   * presumably disconnectFromCatalog() paired with the connectToCatalog
   * call below; confirm against the original file.
   *
   * @throws RuntimeException on any Thrift transport/server error
   */
  private void populateDatabases() {
    // TODO 13 Mar 2021 MAT
    // this probably has to come across from the server on first start up rather
    // than lazy instantiation here
    // as a user may not be able to see all schemas and this sets it for the life
    // of the server.
    // Proceeding this way as a WIP
    if (dbPort == 0) {
      // seems to be a condition that is expected
      // for FSI testing
      return;
    }
    if (dbPort == -1) {
      // use sql
      connectToCatalog("system_catalog"); // hardcoded sys catalog
      Set<String> dbNames = getDatabases_SQL();
      for (String dbName : dbNames) {
        Set<String> ts = new HashSet<String>();
        DATABASE_TO_TABLES.putIfAbsent(dbName, ts);
      }
      return;
    }
    // use thrift direct to local server
    try {
      TProtocol protocol = null;
      TTransport transport =
              sock_transport_properties.openClientTransport("localhost", dbPort);
      if (!transport.isOpen()) transport.open();
      protocol = new TBinaryProtocol(transport);

      Heavy.Client client = new Heavy.Client(protocol);

      List<TDBInfo> dbList = client.get_databases(currentUser.getSession());
      for (TDBInfo dbInfo : dbList) {
        Set<String> ts = new HashSet<String>();
        DATABASE_TO_TABLES.putIfAbsent(dbInfo.db_name, ts);
      }
      // NOTE(review): transport is not closed on the exception paths below
      transport.close();

    } catch (TTransportException ex) {
      HEAVYDBLOGGER.error("TTransportException on port [" + dbPort + "]");
      HEAVYDBLOGGER.error(ex.toString());
      throw new RuntimeException(ex.toString());
    } catch (TDBException ex) {
      HEAVYDBLOGGER.error(ex.getError_msg());
      throw new RuntimeException(ex.getError_msg());
    } catch (TException ex) {
      HEAVYDBLOGGER.error(ex.toString());
      throw new RuntimeException(ex.toString());
    }
  }
839 
840  private Set<String> getDatabases_SQL() {
841  Set<String> dbSet = new HashSet<String>();
842  Statement stmt = null;
843  ResultSet rs = null;
844  String sqlText = "";
845  try {
846  stmt = catConn.createStatement();
847 
848  // get the tables
849  rs = stmt.executeQuery("SELECT name FROM mapd_databases ");
850  while (rs.next()) {
851  dbSet.add(rs.getString("name"));
852  /*--*/
853  HEAVYDBLOGGER.debug("Object name = " + rs.getString("name"));
854  }
855  rs.close();
856  stmt.close();
857 
858  } catch (Exception e) {
859  String err = "error trying to get all the databases, error was " + e.getMessage();
860  HEAVYDBLOGGER.error(err);
861  throw new RuntimeException(err);
862  }
863  return dbSet;
864  }
865 
866  public void updateMetaData(String schema, String table) {
867  // Check if table is specified, if not we are dropping an entire DB so need to
868  // remove all
869  // tables for that DB
870  if (table.equals("")) {
871  // Drop db and all tables
872  // iterate through all and remove matching schema
873  Set<List<String>> all = new HashSet<>(DB_TABLE_DETAILS.keySet());
874  for (List<String> keys : all) {
875  if (keys.get(0).equals(schema.toUpperCase())) {
876  HEAVYDBLOGGER.debug(
877  "removing all for schema " + keys.get(0) + " table " + keys.get(1));
878  DB_TABLE_DETAILS.remove(keys);
879  }
880  }
881  } else {
882  HEAVYDBLOGGER.debug("removing schema " + schema.toUpperCase() + " table "
883  + table.toUpperCase());
884  DB_TABLE_DETAILS.remove(
885  ImmutableList.of(schema.toUpperCase(), table.toUpperCase()));
886  }
887  // Invalidate views
888  Set<List<String>> all = new HashSet<>(DB_TABLE_DETAILS.keySet());
889  for (List<String> keys : all) {
890  if (keys.get(0).equals(schema.toUpperCase())) {
891  Table ttable = DB_TABLE_DETAILS.get(keys);
892  if (ttable instanceof HeavyDBView) {
893  HEAVYDBLOGGER.debug(
894  "removing view in schema " + keys.get(0) + " view " + keys.get(1));
895  DB_TABLE_DETAILS.remove(keys);
896  }
897  }
898  }
899  // Could be a removal or an add request for a DB
900  Set<String> mSet = DATABASE_TO_TABLES.get(schema.toUpperCase());
901  if (mSet != null) {
902  HEAVYDBLOGGER.debug("removing schema " + schema.toUpperCase());
903  DATABASE_TO_TABLES.remove(schema.toUpperCase());
904  } else {
905  // add a empty database descriptor for new DB, it will be lazily populated when
906  // required
907  Set<String> ts = new HashSet<String>();
908  DATABASE_TO_TABLES.putIfAbsent(schema.toUpperCase(), ts);
909  }
910  }
911 }
static volatile Map< String, Set< String > > DATABASE_TO_TABLES
Table getTable(String tableName)
static final int get_physical_cols(int type)
final HeavyDBUser currentUser
static final int KMULTIPOLYGON
void updateMetaData(String schema, String table)
String getViewSql(String tableName)
static final boolean is_geometry(int type)
MetaConnect(int dbPort, String dataDir, HeavyDBUser currentHeavyDBUser, HeavyDBParser parser, SockTransportProperties skT)
String getViewSqlViaSql(int tableId)
TTableDetails get_table_details(String tableName)
static volatile Map< List< String >, Table > DB_TABLE_DETAILS
static final int KINTERVAL_YEAR_MONTH
final SockTransportProperties sock_transport_properties
static final int KLINESTRING
static final String CATALOG_DIR_NAME
void connectToCatalog(String catalog)
static final Logger HEAVYDBLOGGER
final HeavyDBParser parser
TTableDetails get_table_detail_SQL(String tableName)
TDatumType typeToThrift(int type)
Set< String > getDatabases_SQL()
int getTableId(String tableName)
MetaConnect(int dbPort, String dataDir, HeavyDBUser currentHeavyDBUser, HeavyDBParser parser, SockTransportProperties skT, String db)
TTableDetails get_table_detail_JSON(String tableName)
static final int KINTERVAL_DAY_TIME
boolean isView(String tableName)