82 changes: 55 additions & 27 deletions reverse_engineering/api.js
@@ -48,21 +48,36 @@ const getExternalBrowserUrl = async (connectionInfo, logger, cb) => {
}
};

const getDatabases = (connectionInfo, logger, cb) => {
cb();
const getDatabases = async (connectionInfo, logger, cb) => {
try {
logger.clear();

if (connectionInfo.databaseName) {
cb(null, [connectionInfo.databaseName]);
return;
}

await snowflakeHelper.connect(logger, connectionInfo);

const databaseNames = await snowflakeHelper.getDatabaseNames({ logger });

cb(null, databaseNames);
} catch (err) {
handleError(logger, err, cb);
}
};

const getDocumentKinds = (connectionInfo, logger, cb) => {
cb();
cb(null, []);
};

const getDbCollectionsNames = async (connectionInfo, logger, cb) => {
try {
logger.clear();
await snowflakeHelper.connect(logger, connectionInfo);
const schemasInfo = await snowflakeHelper.getSchemasInfo();
const databaseName = connectionInfo.database || connectionInfo.databaseName;
const schemasInfo = await snowflakeHelper.getSchemasInfo(databaseName);
logger.log('info', { schemas: schemasInfo }, 'Found schemas');
const namesBySchemas = await snowflakeHelper.getEntitiesNames({ logger });
const namesBySchemas = await snowflakeHelper.getEntitiesNames({ databaseName, logger });

logger.log('info', { entities: namesBySchemas }, 'Found entities');

@@ -76,33 +91,42 @@ const getDbCollectionsData = async (data, logger, cb) => {
try {
logger.log('info', data, 'Retrieving schema', data.hiddenKeys);
const collections = data.collectionData.collections;
const dataBaseNames = data.collectionData.dataBaseNames;
const entitiesPromises = await dataBaseNames.reduce(async (packagesPromise, schema) => {
const schemaNames = data.collectionData.dataBaseNames;
const databaseName = data.database || data.databaseName;
const entitiesPromises = await schemaNames.reduce(async (packagesPromise, schemaName) => {
const packages = await packagesPromise;
const entities = snowflakeHelper.splitEntityNames(collections[schema]);
const fullSchemaName = databaseName + '.' + schemaName;
const entities = snowflakeHelper.splitEntityNames(collections[schemaName]);

const containerData = await snowflakeHelper.getContainerData({ schema, logger });
const [database, schemaName] = schema.split('.');
const containerData = await snowflakeHelper.getContainerData({ databaseName, schemaName, logger });

const tablesPackages = entities.tables.map(async table => {
const fullTableName = snowflakeHelper.getFullEntityName(schema, table);
logger.progress({ message: `Start getting data from table`, containerName: schema, entityName: table });
const fullTableName = snowflakeHelper.getFullEntityName(databaseName, schemaName, table);
logger.progress({
message: `Start getting data from table`,
containerName: fullSchemaName,
entityName: table,
});
logger.log(
'info',
{ message: `Start getting data from table`, containerName: schema, entityName: table },
{ message: `Start getting data from table`, containerName: fullSchemaName, entityName: table },
'Getting schema',
);
const ddl = await snowflakeHelper.getDDL(fullTableName, logger);
const quantity = await snowflakeHelper.getRowsCount(fullTableName);

logger.progress({
message: `Fetching record for JSON schema inference`,
containerName: schema,
containerName: fullSchemaName,
entityName: table,
});
logger.log(
'info',
{ message: `Fetching record for JSON schema inference`, containerName: schema, entityName: table },
{
message: `Fetching record for JSON schema inference`,
containerName: fullSchemaName,
entityName: table,
},
'Getting schema',
);

@@ -114,19 +138,23 @@ const getDbCollectionsData = async (data, logger, cb) => {

const entityData = await snowflakeHelper.getEntityData({ fullTableName, logger });

logger.progress({ message: `Schema inference`, containerName: schema, entityName: table });
logger.progress({ message: `Schema inference`, containerName: fullSchemaName, entityName: table });
logger.log(
'info',
{ message: `Schema inference`, containerName: schema, entityName: table },
{ message: `Schema inference`, containerName: fullSchemaName, entityName: table },
'Getting schema',
);

const handledDocuments = snowflakeHelper.handleComplexTypesDocuments(jsonSchema, documents);

logger.progress({ message: `Data retrieved successfully`, containerName: schema, entityName: table });
logger.progress({
message: `Data retrieved successfully`,
containerName: fullSchemaName,
entityName: table,
});
logger.log(
'info',
{ message: `Data retrieved successfully`, containerName: schema, entityName: table },
{ message: `Data retrieved successfully`, containerName: fullSchemaName, entityName: table },
'Getting schema',
);

@@ -147,23 +175,23 @@ const getDbCollectionsData = async (data, logger, cb) => {
},
bucketInfo: {
indexes: [],
database,
database: databaseName,
...containerData,
},
};
});

const views = await Promise.all(
entities.views.map(async view => {
const fullViewName = snowflakeHelper.getFullEntityName(schema, view);
const fullViewName = snowflakeHelper.getFullEntityName(databaseName, schemaName, view);
logger.progress({
message: `Start getting data from view`,
containerName: schema,
containerName: fullSchemaName,
entityName: view,
});
logger.log(
'info',
{ message: `Start getting data from view`, containerName: schema, entityName: view },
{ message: `Start getting data from view`, containerName: fullSchemaName, entityName: view },
'Getting schema',
);

@@ -172,12 +200,12 @@ const getDbCollectionsData = async (data, logger, cb) => {

logger.progress({
message: `Data retrieved successfully`,
containerName: schema,
containerName: fullSchemaName,
entityName: view,
});
logger.log(
'info',
{ message: `Data retrieved successfully`, containerName: schema, entityName: view },
{ message: `Data retrieved successfully`, containerName: fullSchemaName, entityName: view },
'Getting schema',
);

@@ -204,7 +232,7 @@ const getDbCollectionsData = async (data, logger, cb) => {
emptyBucket: false,
bucketInfo: {
indexes: [],
database,
database: databaseName,
...containerData,
},
});
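For context on the reworked getDatabases flow above, a minimal usage sketch (illustrative only; it assumes api.js exports getDatabases, as Hackolade RE plugin modules typically do, and stubs the logger):

// Illustrative sketch only — not part of the diff. Assumes api.js exports
// getDatabases and that a stubbed logger is sufficient for this call.
const api = require('./reverse_engineering/api');

const logger = { clear: () => {}, log: () => {}, progress: () => {} };

// With the optional "Schema name" field (stored as databaseName) filled in,
// the callback receives that single name and no connection is opened; with it
// empty, the helper connects and returns the result of SHOW DATABASES.
api.getDatabases({ databaseName: 'ANALYTICS' }, logger, (err, names) => {
	if (err) {
		console.error(err);
		return;
	}
	console.log(names); // ['ANALYTICS']
});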
12 changes: 12 additions & 0 deletions reverse_engineering/config.json
@@ -13,6 +13,18 @@
"value": "externalbrowser"
},
"value": "connectByExternalBrowser"
},
{
"dependency": {
"type": "not",
"values": [
{
"key": "authType",
"value": "externalbrowser"
}
]
},
"value": "getDatabases"
}
],
"helpUrl": "https://hackolade.com/help/ConnecttoaSnowflakeinstance.html"
@@ -58,6 +58,12 @@
"inputType": "text",
"inputTooltip": "Optionally specify the active/current warehouse for the session",
"defaultValue": ""
},
{
"inputLabel": "Schema name",
"inputKeyword": "databaseName",
"inputType": "text",
"inputPlaceholder": "Optional"
}
]
},
85 changes: 52 additions & 33 deletions reverse_engineering/helpers/snowflakeHelper.js
@@ -40,6 +40,7 @@ const connect = async (
name,
cloudPlatform,
queryRequestTimeout,
databaseName,
},
) => {
const account = getAccount(host);
@@ -54,7 +55,8 @@
`Auth type: ${authType}\n` +
`Username: ${username}\n` +
`Warehouse: ${warehouse}\n` +
`Role: ${role}`,
`Role: ${role}\n` +
`Schema name: ${databaseName}`,
'Connection',
);

@@ -483,15 +485,20 @@ const showTablesByDatabases = async databases =>
databases.map(database => execute(`SHOW TABLES IN DATABASE "${removeQuotes(database.name)}";`)),
);

const showSchemasByDatabase = async databaseName =>
databaseName ? showSchemasInDatabase(databaseName) : showSchemas();

const showDatabases = () => execute('SHOW DATABASES;');

const showSchemas = () => execute('SHOW SCHEMAS;');

const showExternalTables = () => execute('SHOW EXTERNAL TABLES;');
const showSchemasInDatabase = databaseName => execute(`SHOW SCHEMAS IN DATABASE "${removeQuotes(databaseName)}";`);

const showExternalTables = ({ options = '' } = {}) => execute(`SHOW EXTERNAL TABLES${options};`);

const showViews = () => execute('SHOW VIEWS;');
const showViews = ({ options = '' } = {}) => execute(`SHOW VIEWS${options};`);

const showMaterializedViews = () => execute('SHOW MATERIALIZED VIEWS;');
const showMaterializedViews = ({ options = '' } = {}) => execute(`SHOW MATERIALIZED VIEWS${options};`);

const showIcebergTables = ({ options = '' } = {}) => execute(`SHOW ICEBERG TABLES${options};`);
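The reworked SHOW helpers above take an optional options string; getEntitiesNames (further down) builds it from the selected database. A minimal sketch of that composition, with execute stubbed to print the statement and removeQuotes approximated (both assumptions about the unchanged helpers):

// Illustrative sketch only — not part of the diff. `execute` is replaced by a
// console.log stand-in and removeQuotes is approximated as stripping
// surrounding double quotes.
const removeQuotes = name => name.replace(/^"|"$/g, '');
const execute = sql => console.log(sql);

const showViews = ({ options = '' } = {}) => execute(`SHOW VIEWS${options};`);

const databaseName = 'MY_DB';
const databaseQueryOptions = databaseName ? ` IN DATABASE "${removeQuotes(databaseName)}"` : '';

showViews({ options: databaseQueryOptions }); // SHOW VIEWS IN DATABASE "MY_DB";
showViews();                                  // SHOW VIEWS;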

@@ -507,8 +514,8 @@ const splitEntityNames = names => {

const isView = name => name.slice(-4) === ' (v)';

const getSchemasInfo = async () => {
const schemas = await showSchemas().catch(err => [{ status: 'error', message: err.message }]);
const getSchemasInfo = async databaseName => {
const schemas = await showSchemasByDatabase(databaseName).catch(err => [{ status: 'error', message: err.message }]);

if (schemas[0]?.status === 'error') {
return schemas;
@@ -592,19 +599,19 @@ const logTablesMeta = async ({ logger, tables = [], icebergTables = [] }) => {
logger.log('info', combinedMeta, 'Tables metadata');
};

const getEntitiesNames = async ({ logger }) => {
const getEntitiesNames = async ({ databaseName, logger }) => {
const logError = logErrorAndReturnEmptyArray({ logger, query: 'SHOW' });

const databases = await showDatabases().catch(logError);
const databaseQueryOptions = databaseName ? ` IN DATABASE "${removeQuotes(databaseName)}"` : '';
const databases = databaseName ? [{ name: databaseName }] : await showDatabases().catch(logError);
const tablesRows = await showTablesByDatabases(databases).catch(logError);
const flatTableRows = tablesRows.flatMap(row => row.value).filter(Boolean);
const icebergTables = await showIcebergTables().catch(logError);
const icebergTables = await showIcebergTables({ options: databaseQueryOptions }).catch(logError);

await logTablesMeta({ logger, tables: flatTableRows, icebergTables });

const externalTableRows = await showExternalTables().catch(logError);
const viewsRows = await showViews().catch(logError);
const materializedViewsRows = await showMaterializedViews().catch(logError);
const externalTableRows = await showExternalTables({ options: databaseQueryOptions }).catch(logError);
const viewsRows = await showViews({ options: databaseQueryOptions }).catch(logError);
const materializedViewsRows = await showMaterializedViews({ options: databaseQueryOptions }).catch(logError);

const entitiesRows = [
...flatTableRows,
@@ -626,7 +633,7 @@ const getEntitiesNames = async ({ logger }) => {
return [
...buckets,
{
dbName: `${dbName}.${schema}`,
dbName: schema,
dbCollections: entities,
isEmpty: !entities.length,
},
@@ -636,8 +643,17 @@ const getEntitiesNames = async ({ logger }) => {
}, []);
};

const getFullEntityName = (schemaName, tableName) => {
return [...schemaName.split('.'), tableName].map(addQuotes).join('.');
const getDatabaseNames = async ({ logger }) => {
const logError = logErrorAndReturnEmptyArray({ logger, query: 'SHOW' });
const databases = await showDatabases().catch(logError);

logger.log('info', { databases }, 'Found databases');

return databases.map(({ name }) => name);
};

const getFullEntityName = (databaseName, schemaName, tableName) => {
return [databaseName, schemaName, tableName].map(addQuotes).join('.');
};

const addQuotes = string => {
@@ -1502,12 +1518,12 @@ const getFileFormats = async (dbName, schemaName) => {
);
};

const getContainerData = async ({ schema, logger }) => {
if (containers[schema]) {
return containers[schema];
const getContainerData = async ({ databaseName, schemaName, logger }) => {
if (containers[schemaName]) {
return containers[schemaName];
}
const [dbName, schemaName] = schema.split('.');
const dbNameWithoutQuotes = removeQuotes(dbName);

const dbNameWithoutQuotes = removeQuotes(databaseName);

try {
const dbRows = await execute(
@@ -1519,13 +1535,13 @@ const getContainerData = async ({ schema, logger }) => {
);
const isCaseSensitive = _.toUpper(schemaName) !== schemaName;
const schemaData = _.first(schemaRows);
const functions = await getFunctions(dbName, schemaName);
const procedures = await getProcedures(dbName, schemaName);
const stages = await getStages(dbName, schemaName);
const sequences = await getSequences(dbName, schemaName);
const fileFormats = await getFileFormats(dbName, schemaName);
const tags = await getTags({ dbName, schemaName, logger });
const schemaTags = await getSchemaTags({ dbName, schemaName, logger });
const functions = await getFunctions(databaseName, schemaName);
const procedures = await getProcedures(databaseName, schemaName);
const stages = await getStages(databaseName, schemaName);
const sequences = await getSequences(databaseName, schemaName);
const fileFormats = await getFileFormats(databaseName, schemaName);
const tags = await getTags({ databaseName, schemaName, logger });
const schemaTags = await getSchemaTags({ databaseName, schemaName, logger });

const data = {
transient: Boolean(_.get(schemaData, 'IS_TRANSIENT', false) && _.get(schemaData, 'IS_TRANSIENT') !== 'NO'),
@@ -1540,7 +1556,7 @@ const getContainerData = async ({ schema, logger }) => {
tags,
schemaTags,
};
containers[schema] = data;
containers[schemaName] = data;

return data;
} catch (error) {
@@ -1564,9 +1580,11 @@ const getTagAllowedValues = ({ values, logger }) => {
}
};

const getTags = async ({ dbName, schemaName, logger }) => {
const getTags = async ({ databaseName, schemaName, logger }) => {
try {
const rows = await execute(`SHOW TAGS IN SCHEMA "${removeQuotes(dbName)}"."${removeQuotes(schemaName)}";`);
const rows = await execute(
`SHOW TAGS IN SCHEMA "${removeQuotes(databaseName)}"."${removeQuotes(schemaName)}";`,
);

return rows.map(row => ({
name: row.name,
@@ -1580,10 +1598,10 @@ const getTags = async ({ dbName, schemaName, logger }) => {
}
};

const getSchemaTags = async ({ dbName, schemaName, logger }) => {
const getSchemaTags = async ({ databaseName, schemaName, logger }) => {
try {
const rows = await execute(
`SELECT TAG_DATABASE, TAG_SCHEMA, TAG_NAME, TAG_VALUE FROM TABLE("${removeQuotes(dbName)}".information_schema.tag_references('"${removeQuotes(dbName)}"."${removeQuotes(schemaName)}"', 'SCHEMA'));`,
`SELECT TAG_DATABASE, TAG_SCHEMA, TAG_NAME, TAG_VALUE FROM TABLE("${removeQuotes(databaseName)}".information_schema.tag_references('"${removeQuotes(databaseName)}"."${removeQuotes(schemaName)}"', 'SCHEMA'));`,
);

return rows.map(row => ({
@@ -1628,6 +1646,7 @@ module.exports = {
disconnect,
testConnection,
getEntitiesNames,
getDatabaseNames,
getDDL,
getViewDDL,
getSchemaDDL,
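Finally, to make the getFullEntityName signature change concrete, a small sketch of the intended three-part quoting (illustrative only; addQuotes is approximated as plain double-quoting, which may differ from the real helper):

// Illustrative sketch only — not part of the diff. addQuotes is approximated;
// the real helper may treat already-quoted or case-sensitive names differently.
const addQuotes = name => `"${name.replace(/^"|"$/g, '')}"`;

const getFullEntityName = (databaseName, schemaName, tableName) =>
	[databaseName, schemaName, tableName].map(addQuotes).join('.');

console.log(getFullEntityName('MY_DB', 'PUBLIC', 'ORDERS'));
// "MY_DB"."PUBLIC"."ORDERS"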