ignore PostGIS table on schema generation

Kononnable 2019-12-15 22:39:09 +01:00
parent d8448fd5ed
commit f5a9da2880
2 changed files with 13 additions and 19 deletions
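
The change itself is small: the table-listing query in the Postgres driver now excludes spatial_ref_sys, the lookup table that the PostGIS extension installs in every database it is enabled on, so schema generation no longer produces an entity for it. Below is a minimal sketch of the same filter written against the plain pg client instead of the generator's own connection wrapper; the connection-string handling and the hard-coded 'public' schema are illustrative assumptions, not part of this commit.

// Sketch: list only user tables, skipping the PostGIS metadata table.
// Connection settings and the fixed 'public' schema are assumptions.
import { Client } from "pg";

async function listGeneratableTables(): Promise<string[]> {
    const client = new Client({ connectionString: process.env.DATABASE_URL });
    await client.connect();
    const result = await client.query(
        `SELECT table_name
         FROM INFORMATION_SCHEMA.TABLES
         WHERE TABLE_TYPE = 'BASE TABLE'
           AND table_schema = 'public'
           AND table_name <> 'spatial_ref_sys'`
    );
    await client.end();
    return result.rows.map(row => row.table_name);
}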

CHANGELOG.md

@@ -3,7 +3,8 @@
## Unreleased
* change default case conversions for generated files (#196)
-* enum type safety #205
+* enum type safety (#205)
+* postgress geography type support (#232)
## 0.3.5

src/drivers/PostgresDriver.ts

@@ -29,11 +29,10 @@ export default class PostgresDriver extends AbstractDriver {
TABLE_SCHEMA: string;
TABLE_NAME: string;
DB_NAME: string;
-}[] = (
-await this.Connection.query(
-`SELECT table_schema as "TABLE_SCHEMA",table_name as "TABLE_NAME", table_catalog as "DB_NAME" FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE' AND table_schema in (${schema}) `
-)
-).rows;
+}[] = (await this.Connection.query(
+`SELECT table_schema as "TABLE_SCHEMA",table_name as "TABLE_NAME", table_catalog as "DB_NAME" FROM INFORMATION_SCHEMA.TABLES
+WHERE TABLE_TYPE='BASE TABLE' AND table_schema in (${schema}) AND table_name<>'spatial_ref_sys'`
+)).rows;
return response;
};
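
Only spatial_ref_sys has to be excluded by name here: on PostGIS 2 and later, geometry_columns and geography_columns are defined as views, so the existing TABLE_TYPE='BASE TABLE' condition already keeps them out of the result set. A small sketch for checking that against a concrete database follows; it assumes an already-connected pg client and is not part of the commit.

// Sketch: confirm which of the standard PostGIS relations are base tables.
// Assumes `client` is an already-connected pg Client.
import { Client } from "pg";

async function checkPostgisRelations(client: Client): Promise<void> {
    const result = await client.query(
        `SELECT table_name, table_type
         FROM INFORMATION_SCHEMA.TABLES
         WHERE table_name IN ('spatial_ref_sys', 'geometry_columns', 'geography_columns')`
    );
    // Typically only spatial_ref_sys is reported as 'BASE TABLE';
    // geometry_columns and geography_columns show up as 'VIEW'.
    console.table(result.rows);
}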
@@ -54,9 +53,8 @@ export default class PostgresDriver extends AbstractDriver {
isidentity: string;
isunique: string;
enumvalues: string | null;
-}[] = (
-await this.Connection
-.query(`SELECT table_name,column_name,udt_name,column_default,is_nullable,
+}[] = (await this.Connection
+.query(`SELECT table_name,column_name,udt_name,column_default,is_nullable,
data_type,character_maximum_length,numeric_precision,numeric_scale,
case when column_default LIKE 'nextval%' then 'YES' else 'NO' end isidentity,
(SELECT count(*)
@@ -77,8 +75,7 @@ WHERE "n"."nspname" = table_schema AND "t"."typname"=udt_name
) enumValues
FROM INFORMATION_SCHEMA.COLUMNS c
where table_schema in (${schema})
-order by ordinal_position`)
-).rows;
+order by ordinal_position`)).rows;
entities.forEach(ent => {
response
.filter(filterVal => filterVal.table_name === ent.tsEntityName)
@@ -412,8 +409,7 @@ WHERE "n"."nspname" = table_schema AND "t"."typname"=udt_name
columnname: string;
is_unique: number;
is_primary_key: number;
-}[] = (
-await this.Connection.query(`SELECT
+}[] = (await this.Connection.query(`SELECT
c.relname AS tablename,
i.relname as indexname,
f.attname AS columnname,
@@ -436,8 +432,7 @@ WHERE "n"."nspname" = table_schema AND "t"."typname"=udt_name
AND n.nspname in (${schema})
AND f.attnum > 0
AND i.oid<>0
-ORDER BY c.relname,f.attname;`)
-).rows;
+ORDER BY c.relname,f.attname;`)).rows;
entities.forEach(ent => {
response
.filter(filterVal => filterVal.tablename === ent.tsEntityName)
@@ -484,8 +479,7 @@ WHERE "n"."nspname" = table_schema AND "t"."typname"=udt_name
onupdate: "RESTRICT" | "CASCADE" | "SET NULL" | "NO ACTION";
object_id: string;
// Distinct because of note in https://www.postgresql.org/docs/9.1/information-schema.html
-}[] = (
-await this.Connection.query(`SELECT DISTINCT
+}[] = (await this.Connection.query(`SELECT DISTINCT
con.relname AS tablewithforeignkey,
att.attnum as fk_partno,
att2.attname AS foreignkeycolumn,
@@ -524,8 +518,7 @@ WHERE "n"."nspname" = table_schema AND "t"."typname"=udt_name
AND att2.attrelid = con.conrelid
AND att2.attnum = con.parent
AND rc.constraint_name= con.conname AND constraint_catalog=current_database() AND rc.constraint_schema=nspname
-`)
-).rows;
+`)).rows;
const relationsTemp: RelationTempInfo[] = [] as RelationTempInfo[];
response.forEach(resp => {
let rels = relationsTemp.find(