Postgres support; column types and primary-key detection are not working yet

This commit is contained in:
Kononnable 2017-07-02 20:23:54 +02:00
parent e443d649dc
commit d60470a7f5
10 changed files with 616 additions and 66 deletions

View File

@ -17,7 +17,9 @@ env:
before_install:
- docker pull microsoft/mssql-server-linux
- docker pull postgres
- docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=!Passw0rd' -p 1433:1433 --name mssql -d microsoft/mssql-server-linux
- docker run -e 'POSTGRES_PASSWORD=!Passw0rd' -p 5432:5432 --name postgres -d postgres
before_script:

110
package-lock.json generated
View File

@ -48,8 +48,17 @@
"@types/node": {
"version": "7.0.31",
"resolved": "https://registry.npmjs.org/@types/node/-/node-7.0.31.tgz",
"integrity": "sha512-+KrE1LDddn97ip+gXZAnzNQ0pupKH/6tcKwTpo96BDVNpzmhIKGHug0Wd3H0dN4WEqYB1tXYI5m2mZuIZNI8tg==",
"dev": true
"integrity": "sha512-+KrE1LDddn97ip+gXZAnzNQ0pupKH/6tcKwTpo96BDVNpzmhIKGHug0Wd3H0dN4WEqYB1tXYI5m2mZuIZNI8tg=="
},
"@types/pg": {
"version": "6.1.41",
"resolved": "https://registry.npmjs.org/@types/pg/-/pg-6.1.41.tgz",
"integrity": "sha512-iTzD3R2CY/aSybbZieXGXi3Wxrk7XV/jRU1lofH/hyQNCbTTskXB/fomFDv+XTVXKjqrpCB/AXVLnUwbdFqthQ=="
},
"@types/pg-types": {
"version": "1.11.2",
"resolved": "https://registry.npmjs.org/@types/pg-types/-/pg-types-1.11.2.tgz",
"integrity": "sha512-ZkLnKt6q9PhVhM3hA9if2HCu+NLSETvk7TeHXqlZxIE1O7HL2RDm2epBscwDh3pEzDpWoB+VlUYpz20R3Kqj7A=="
},
"@types/sinon": {
"version": "2.3.1",
@ -129,6 +138,11 @@
"integrity": "sha1-q8av7tzqUugJzcA3au0845Y10X8=",
"dev": true
},
"ap": {
"version": "0.2.0",
"resolved": "https://registry.npmjs.org/ap/-/ap-0.2.0.tgz",
"integrity": "sha1-rglCYAspkS8NKxTsYMRejzMLYRA="
},
"app-root-path": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/app-root-path/-/app-root-path-2.0.1.tgz",
@ -298,6 +312,11 @@
"integrity": "sha1-81HTKWnTL6XXpVZxVCY9korjvR8=",
"dev": true
},
"buffer-writer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/buffer-writer/-/buffer-writer-1.0.1.tgz",
"integrity": "sha1-Iqk2kB4wKa/NdUfrRIfOtpejvwg="
},
"builtin-modules": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/builtin-modules/-/builtin-modules-1.1.1.tgz",
@ -1554,6 +1573,11 @@
}
}
},
"moment": {
"version": "2.18.1",
"resolved": "https://registry.npmjs.org/moment/-/moment-2.18.1.tgz",
"integrity": "sha1-w2GT3Tzhwu7SrbfIAtu8d6gbHA8="
},
"ms": {
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/ms/-/ms-0.7.2.tgz",
@ -1658,6 +1682,11 @@
"integrity": "sha1-iGmgQBJTZhxMTKPabCEh7VVfXu0=",
"dev": true
},
"packet-reader": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/packet-reader/-/packet-reader-0.3.1.tgz",
"integrity": "sha1-zWLmCvjX/qinBexP+ZCHHEaHHyc="
},
"parent-require": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/parent-require/-/parent-require-1.0.0.tgz",
@ -1703,6 +1732,50 @@
"resolved": "https://registry.npmjs.org/path-type/-/path-type-1.1.0.tgz",
"integrity": "sha1-WcRPfuSR2nBNpBXaWkBwuk+P5EE="
},
"pg": {
"version": "6.4.0",
"resolved": "https://registry.npmjs.org/pg/-/pg-6.4.0.tgz",
"integrity": "sha1-y3a6Lnwuq4n8ZL96n+ZIztckNtw=",
"dependencies": {
"semver": {
"version": "4.3.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-4.3.2.tgz",
"integrity": "sha1-x6BxWKgL7dBSNVt3DYLWZA+AO+c="
}
}
},
"pg-connection-string": {
"version": "0.1.3",
"resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-0.1.3.tgz",
"integrity": "sha1-2hhHsglA5C7hSSvq9l1J2RskXfc="
},
"pg-pool": {
"version": "1.8.0",
"resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-1.8.0.tgz",
"integrity": "sha1-9+xzgkw3oD8Hb1G/33DjQBR8Tzc=",
"dependencies": {
"generic-pool": {
"version": "2.4.3",
"resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-2.4.3.tgz",
"integrity": "sha1-eAw29p360FpaBF3Te+etyhGk9v8="
},
"object-assign": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.0.tgz",
"integrity": "sha1-ejs9DpgGPUP0wD8uiubNUahog6A="
}
}
},
"pg-types": {
"version": "1.12.0",
"resolved": "https://registry.npmjs.org/pg-types/-/pg-types-1.12.0.tgz",
"integrity": "sha1-itO3uJfj/UY+Yt4kGtX8ZAtKZvA="
},
"pgpass": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.2.tgz",
"integrity": "sha1-Knu0G2BltnkH6R2hsHwYR8h3swY="
},
"pify": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz",
@ -1748,6 +1821,26 @@
"integrity": "sha1-jdcMT+fGlBCa3XhP/oDqysHnso0=",
"dev": true
},
"postgres-array": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-1.0.2.tgz",
"integrity": "sha1-jgsy6wO/d6XAp4UeBEHBaaJWojg="
},
"postgres-bytea": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.0.tgz",
"integrity": "sha1-AntTPAqokOJtFy1Hz5zOzFIazTU="
},
"postgres-date": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.3.tgz",
"integrity": "sha1-4tiXAu/bJY/52c7g/pG9BpdSV6g="
},
"postgres-interval": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.1.0.tgz",
"integrity": "sha1-EDHnusNFZBMoYq3J62xtLzqnW7Q="
},
"prelude-ls": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz",
@ -2020,6 +2113,11 @@
"resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz",
"integrity": "sha1-yd96NCRZSt5r0RkA1ZZpbcBrrFc="
},
"split": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/split/-/split-1.0.0.tgz",
"integrity": "sha1-xDlc5oOrzSVLwo/h2rtuXCfc/64="
},
"sprintf": {
"version": "0.1.5",
"resolved": "https://registry.npmjs.org/sprintf/-/sprintf-0.1.5.tgz",
@ -2129,6 +2227,11 @@
"integrity": "sha512-/EY8VpvlqJ+sFtLPeOgc8Pl7kQVOWv0woD87KTXVHPIAE842FGT+rokxIhe8xIUP1cfgrkt0as0vDLjDiMtr8w==",
"dev": true
},
"through": {
"version": "2.3.8",
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz",
"integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU="
},
"through2": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/through2/-/through2-2.0.1.tgz",
@ -2434,8 +2537,7 @@
"xtend": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz",
"integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68=",
"dev": true
"integrity": "sha1-pcbVMr5lbiPbgg77lDofBJmNY68="
},
"y18n": {
"version": "3.2.1",

View File

@ -22,8 +22,10 @@
},
"homepage": "https://github.com/Kononnable/typeorm-model-generator#readme",
"dependencies": {
"@types/pg": "^6.1.41",
"handlebars": "^4.0.10",
"mssql": "^3.3.0",
"pg": "^6.4.0",
"reflect-metadata": "^0.1.10",
"typeorm": "0.0.10",
"typescript": "^2.4.0",

View File

@ -147,16 +147,16 @@ export class MssqlDriver extends AbstractDriver {
let request = new MSSQL.Request(this.Connection)
let response: {
TableName: string, IndexName: string, ColumnName: string, is_unique: number,
is_primary_key: number, is_descending_key: number, is_included_column: number
is_primary_key: number//, is_descending_key: number//, is_included_column: number
}[]
= await request.query(`SELECT
TableName = t.name,
IndexName = ind.name,
ColumnName = col.name,
ind.is_unique,
ind.is_primary_key,
ic.is_descending_key,
ic.is_included_column
ind.is_primary_key
-- ,ic.is_descending_key,
-- ic.is_included_column
FROM
sys.indexes ind
INNER JOIN
@ -189,8 +189,8 @@ ORDER BY
ent.Indexes.push(indexInfo);
}
indexColumnInfo.name = resp.ColumnName;
indexColumnInfo.isIncludedColumn = resp.is_included_column == 1 ? true : false;
indexColumnInfo.isDescending = resp.is_descending_key == 1 ? true : false;
// indexColumnInfo.isIncludedColumn = resp.is_included_column == 1 ? true : false;
// indexColumnInfo.isDescending = resp.is_descending_key == 1 ? true : false;
indexInfo.columns.push(indexColumnInfo);
})

View File

@ -0,0 +1,387 @@
import { AbstractDriver } from './AbstractDriver'
import * as PG from 'pg'
import { ColumnInfo } from './../models/ColumnInfo'
import { EntityInfo } from './../models/EntityInfo'
import { RelationInfo } from './../models/RelationInfo'
import { DatabaseModel } from './../models/DatabaseModel'
/**
* PostgresDriver
*/
export class PostgresDriver extends AbstractDriver {
private Connection:PG.Client;
FindPrimaryColumnsFromIndexes(dbModel: DatabaseModel) {
dbModel.entities.forEach(entity => {
let primaryIndex = entity.Indexes.find(v => v.isPrimaryKey);
if (!primaryIndex) {
console.error(`Table ${entity.EntityName} has no PK.`)
return;
}
entity.Columns.forEach(col => {
if (primaryIndex!.columns.some(cIndex => cIndex.name == col.name)) col.isPrimary = true
})
});
}
async GetAllTables(): Promise<EntityInfo[]> {
let response: { table_schema: string, table_name: string }[]
= (await this.Connection.query("SELECT table_schema,table_name FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_TYPE='BASE TABLE' AND table_schema = 'public' ")).rows;
let ret: EntityInfo[] = <EntityInfo[]>[];
response.forEach((val) => {
let ent: EntityInfo = new EntityInfo();
ent.EntityName = val.table_name;
ent.Columns = <ColumnInfo[]>[];
ent.Indexes = <IndexInfo[]>[];
ret.push(ent);
})
return ret;
}
async GetCoulmnsFromEntity(entities: EntityInfo[]): Promise<EntityInfo[]> {
let response: {
table_name: string, column_name: string, column_default: string,
is_nullable: string, data_type: string, character_maximum_length: number,
numeric_precision: number, numeric_scale: number, isidentity: number
}[]
= (await this.Connection.query(`SELECT table_name,column_name,column_default,is_nullable,
data_type,character_maximum_length,numeric_precision,numeric_scale
--,COLUMNPROPERTY(object_id(table_name), column_name, 'isidentity') isidentity
, case when column_default LIKE 'nextval%' then 'YES' else 'NO' end isidentity
FROM INFORMATION_SCHEMA.COLUMNS where table_schema ='public'`)).rows;
entities.forEach((ent) => {
response.filter((filterVal) => {
return filterVal.table_name == ent.EntityName;
}).forEach((resp) => {
let colInfo: ColumnInfo = new ColumnInfo();
colInfo.name = resp.column_name;
colInfo.is_nullable = resp.is_nullable == 'YES' ? true : false;
colInfo.is_generated = resp.isidentity == 1 ? true : false;
colInfo.default = colInfo.is_generated?'':resp.column_default;
switch (resp.data_type) {
//TODO:change types to postgres
case "integer":
colInfo.ts_type = "number"
colInfo.sql_type = "int"
break;
case "character varying":
colInfo.ts_type = "string"
colInfo.sql_type = "text"
break;
case "text":
colInfo.ts_type = "string"
colInfo.sql_type = "text"
break;
case "smallint":
colInfo.ts_type = "number"
colInfo.sql_type = "smallint"
break;
case "bigint":
colInfo.ts_type = "number"
colInfo.sql_type = "bigint"
break;
case "date":
colInfo.ts_type = "Date"
colInfo.sql_type = "date"
break;
case "boolean":
colInfo.ts_type = "boolean"
colInfo.sql_type = "boolean"
break;
// case "double precision":
// colInfo.ts_type = "number"
// colInfo.sql_type = "boolean"
// break;
// case "boolean":
// colInfo.ts_type = "boolean"
// colInfo.sql_type = "boolean"
// break;
// case "boolean":
// colInfo.ts_type = "boolean"
// colInfo.sql_type = "boolean"
// break;
// case "boolean":
// colInfo.ts_type = "boolean"
// colInfo.sql_type = "boolean"
// break;
// case "boolean":
// colInfo.ts_type = "boolean"
// colInfo.sql_type = "boolean"
// break;
// case "boolean":
// colInfo.ts_type = "boolean"
// colInfo.sql_type = "boolean"
// break;
default:
console.error("Unknown column type:" + resp.data_type);
break;
}
colInfo.char_max_lenght = resp.character_maximum_length > 0 ? resp.character_maximum_length : null;
if (colInfo.sql_type) ent.Columns.push(colInfo);
})
})
return entities;
}
async GetIndexesFromEntity(entities: EntityInfo[]): Promise<EntityInfo[]> {
let response: {
tablename: string, indexname: string, columnname: string, is_unique: number,
is_primary_key: number//, is_descending_key: number//, is_included_column: number
}[]
= (await this.Connection.query(`SELECT
c.relname AS tablename,
i.relname as indexname,
f.attname AS columnname,
CASE
WHEN p.contype = 'u' THEN 't'
WHEN p.contype = 'p' THEN 't'
ELSE 'f'
END AS is_unique,
CASE
WHEN p.contype = 'p' THEN 't'
ELSE 'f'
END AS is_primary_key
FROM pg_attribute f
JOIN pg_class c ON c.oid = f.attrelid
JOIN pg_type t ON t.oid = f.atttypid
LEFT JOIN pg_attrdef d ON d.adrelid = c.oid AND d.adnum = f.attnum
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
LEFT JOIN pg_constraint p ON p.conrelid = c.oid AND f.attnum = ANY (p.conkey)
LEFT JOIN pg_class AS g ON p.confrelid = g.oid
LEFT JOIN pg_index AS ix ON f.attnum = ANY(ix.indkey) and c.oid = f.attrelid and c.oid = ix.indrelid
LEFT JOIN pg_class AS i ON ix.indexrelid = i.oid
WHERE c.relkind = 'r'::char
AND n.nspname = 'public' -- Replace with Schema name
--AND c.relname = 'nodes' -- Replace with table name, or Comment this for get all tables
AND f.attnum > 0
AND i.oid<>0
ORDER BY c.relname,f.attname;`)).rows;
entities.forEach((ent) => {
response.filter((filterVal) => {
return filterVal.tablename == ent.EntityName;
}).forEach((resp) => {
let indexInfo: IndexInfo = <IndexInfo>{};
let indexColumnInfo: IndexColumnInfo = <IndexColumnInfo>{};
if (ent.Indexes.filter((filterVal) => {
return filterVal.name == resp.indexname
}).length > 0) {
indexInfo = ent.Indexes.filter((filterVal) => {
return filterVal.name == resp.indexname
})[0];
} else {
indexInfo.columns = <IndexColumnInfo[]>[];
indexInfo.name = resp.indexname;
indexInfo.isUnique = resp.is_unique == 1 ? true : false;
indexInfo.isPrimaryKey = resp.is_primary_key == 1 ? true : false;
ent.Indexes.push(indexInfo);
}
indexColumnInfo.name = resp.columnname;
// indexColumnInfo.isIncludedColumn = resp.is_included_column == 1 ? true : false;
//indexColumnInfo.isDescending = resp.is_descending_key == 1 ? true : false;
indexInfo.columns.push(indexColumnInfo);
})
})
return entities;
}
async GetRelations(entities: EntityInfo[]): Promise<EntityInfo[]> {
let response: {
tablewithforeignkey: string, fk_partno: number, foreignkeycolumn: string,
tablereferenced: string, foreignkeycolumnreferenced: string,
ondelete: "RESTRICT" | "CASCADE" | "SET NULL" | "NO ACTION",
onupdate: "RESTRICT" | "CASCADE" | "SET NULL" | "NO ACTION", object_id: string
}[]
= (await this.Connection.query(`SELECT
cl.relname AS tablewithforeignkey,
att.attnum as fk_partno,
att.attname AS foreignkeycolumn,
con.relname AS tablereferenced,
att2.attname AS foreignkeycolumnreferenced,
update_rule as ondelete,
delete_rule as onupdate,
con.conname as object_id
FROM (
SELECT
unnest(con1.conkey) AS parent,
unnest(con1.confkey) AS child,
con1.confrelid,
con1.conrelid,
cl_1.relname,
con1.conname
FROM
pg_class cl_1,
pg_namespace ns,
pg_constraint con1
WHERE
con1.contype = 'f'::"char"
AND cl_1.relnamespace = ns.oid
AND con1.conrelid = cl_1.oid
) con,
pg_attribute att,
pg_class cl,
pg_attribute att2,
information_schema.referential_constraints rc
WHERE
att.attrelid = con.confrelid
AND att.attnum = con.child
AND cl.oid = con.confrelid
AND att2.attrelid = con.conrelid
AND att2.attnum = con.parent
and rc.constraint_name= con.conname`)).rows;
let relationsTemp: RelationTempInfo[] = <RelationTempInfo[]>[];
response.forEach((resp) => {
let rels = relationsTemp.find((val) => {
return val.object_id == resp.object_id;
})
if (rels == undefined) {
rels = <RelationTempInfo>{};
rels.ownerColumnsNames = [];
rels.referencedColumnsNames = [];
rels.actionOnDelete = resp.ondelete;
rels.actionOnUpdate = resp.onupdate;
rels.object_id = resp.object_id;
rels.ownerTable = resp.tablewithforeignkey;
rels.referencedTable = resp.tablereferenced;
relationsTemp.push(rels);
}
rels.ownerColumnsNames.push(resp.foreignkeycolumn);
rels.referencedColumnsNames.push(resp.foreignkeycolumnreferenced);
})
relationsTemp.forEach((relationTmp) => {
let ownerEntity = entities.find((entitity) => {
return entitity.EntityName == relationTmp.ownerTable;
})
if (!ownerEntity) {
console.error(`Relation between tables ${relationTmp.ownerTable} and ${relationTmp.referencedTable} didn't found entity model ${relationTmp.ownerTable}.`)
return;
}
let referencedEntity = entities.find((entitity) => {
return entitity.EntityName == relationTmp.referencedTable;
})
if (!referencedEntity) {
console.error(`Relation between tables ${relationTmp.ownerTable} and ${relationTmp.referencedTable} didn't found entity model ${relationTmp.referencedTable}.`)
return;
}
let ownerColumn = ownerEntity.Columns.find((column) => {
return column.name == relationTmp.ownerColumnsNames[0];
})
if (!ownerColumn) {
console.error(`Relation between tables ${relationTmp.ownerTable} and ${relationTmp.referencedTable} didn't found entity column ${relationTmp.ownerTable}.${ownerColumn}.`)
return;
}
let relatedColumn = referencedEntity.Columns.find((column) => {
return column.name == relationTmp.referencedColumnsNames[0];
})
if (!relatedColumn) {
console.error(`Relation between tables ${relationTmp.ownerTable} and ${relationTmp.referencedTable} didn't found entity column ${relationTmp.referencedTable}.${relatedColumn}.`)
return;
}
let ownColumn: ColumnInfo = ownerColumn;
let isOneToMany: boolean;
isOneToMany = false;
let index = ownerEntity.Indexes.find(
(index) => {
return index.isUnique && index.columns.some(col => {
return col.name == ownerColumn!.name
})
}
)
if (!index) {
isOneToMany = true;
} else {
isOneToMany = false;
}
let ownerRelation = new RelationInfo()
ownerRelation.actionOnDelete = relationTmp.actionOnDelete
ownerRelation.actionOnUpdate = relationTmp.actionOnUpdate
ownerRelation.isOwner = true
ownerRelation.relatedColumn = relatedColumn.name.toLowerCase()
ownerRelation.relatedTable = relationTmp.referencedTable
ownerRelation.ownerTable = relationTmp.ownerTable
ownerRelation.ownerColumn = ownerEntity.EntityName.toLowerCase()+(isOneToMany ? 's':'')
ownerRelation.relationType = isOneToMany ? "ManyToOne" : "OneToOne"
ownerColumn.relations.push(ownerRelation)
if (isOneToMany) {
let col = new ColumnInfo()
col.name = ownerEntity.EntityName.toLowerCase() +'s'
let referencedRelation = new RelationInfo();
col.relations.push(referencedRelation)
referencedRelation.actionondelete = relationTmp.actionOnDelete
referencedRelation.actiononupdate = relationTmp.actionOnUpdate
referencedRelation.isOwner = false
referencedRelation.relatedColumn = ownerColumn.name
referencedRelation.relatedTable = relationTmp.ownerTable
referencedRelation.ownerTable = relationTmp.referencedTable
referencedRelation.ownerColumn = relatedColumn.name.toLowerCase()
referencedRelation.relationType = "OneToMany"
referencedEntity.Columns.push(col)
} else {
let col = new ColumnInfo()
col.name = ownerEntity.EntityName.toLowerCase()
let referencedRelation = new RelationInfo();
col.relations.push(referencedRelation)
referencedRelation.actionondelete = relationTmp.actionOnDelete
referencedRelation.actiononupdate = relationTmp.actionOnUpdate
referencedRelation.isOwner = false
referencedRelation.relatedColumn = ownerColumn.name
referencedRelation.relatedTable = relationTmp.ownerTable
referencedRelation.ownerTable = relationTmp.referencedTable
referencedRelation.ownerColumn = relatedColumn.name.toLowerCase()
referencedRelation.relationType = "OneToOne"
referencedEntity.Columns.push(col)
}
})
return entities;
}
async DisconnectFromServer() {
if (this.Connection){
let promise = new Promise<boolean>(
(resolve, reject) => { this.Connection.end((err) => {
if (!err) {
//Connection successfull
resolve(true)
}
else {
console.error('Error connecting to Postgres Server.')
console.error(err.message)
process.abort()
reject(err)
}
});
})
await promise;
}
}
async ConnectToServer(database: string, server: string, port: number, user: string, password: string) {
this.Connection=new PG.Client({database: database,
host: server,
port: port,
user: user,
password: password})
let promise = new Promise<boolean>(
(resolve, reject) => {
this.Connection.connect( (err) => {
if (!err) {
//Connection successfull
resolve(true)
}
else {
console.error('Error connecting to Postgres Server.')
console.error(err.message)
process.abort()
reject(err)
}
});
}
)
await promise;
}
}

View File

@ -3,6 +3,7 @@ import { Engine } from './Engine'
import * as Yargs from 'yargs'
import { AbstractDriver } from "./drivers/AbstractDriver";
import path = require('path')
import { PostgresDriver } from "./drivers/PostgresDriver";
@ -35,13 +36,13 @@ var argv = Yargs
.option('e', {
alias: 'engine',
describe: 'Database engine.',
choices: ['mssql'],
choices: ['mssql','postgres'],
default: 'mssql'
})
.option('o', {
alias: 'output',
describe: 'Where to place generated models.',
default: path.resolve(process.cwd(),'output')
default: path.resolve(process.cwd(), 'output')
})
.argv;
@ -52,6 +53,10 @@ switch (argv.e) {
case 'mssql':
driver = new MssqlDriver();
standardPort = 1433;
break;
case 'postgres':
driver = new PostgresDriver();
standardPort = 5432;
break;
default:
console.error('Database engine not recognized.')
@ -60,18 +65,18 @@ switch (argv.e) {
}
let engine = new Engine(
driver,{
driver, {
host: argv.h,
port: parseInt(argv.p) || standardPort,
databaseName: argv.d,
user: argv.u,
password: argv.x,
databaseType:argv.e,
resultsPath:argv.o
databaseType: argv.e,
resultsPath: argv.o
});
console.log(`[${new Date().toLocaleTimeString()}] Starting creation of model classes.`);
engine.createModelFromDatabase().then( ()=>{
console.info(`[${new Date().toLocaleTimeString()}] Typeorm model classes created.`)
})
engine.createModelFromDatabase().then(() => {
console.info(`[${new Date().toLocaleTimeString()}] Typeorm model classes created.`)
})

View File

@ -1,5 +1,5 @@
interface IndexColumnInfo{
name:string,
isDescending:boolean,
isIncludedColumn:boolean
//isDescending:boolean,
// isIncludedColumn:boolean
}

View File

@ -1,12 +1,13 @@
export class RelationInfo {
export class RelationInfo { [x: string]: any;
isOwner: boolean
relationType: "OneToOne" | "OneToMany" | "ManyToOne"
relatedTable: string
relatedColumn: string
ownerTable: string
ownerColumn: string
actionOnDelete: "RESTRICT" | "CASCADE" | "SET NULL"
actionOnUpdate: "RESTRICT" | "CASCADE" | "SET NULL"
actionOnDelete: "RESTRICT" | "CASCADE" | "SET NULL" | "NO ACTION"
actionOnUpdate: "RESTRICT" | "CASCADE" | "SET NULL" | "NO ACTION"
get isOneToMany(): boolean {
return this.relationType == "OneToMany"

View File

@ -3,7 +3,7 @@ interface RelationTempInfo{
ownerColumnsNames:string[],
referencedTable:string,
referencedColumnsNames:string[],
actionOnDelete:"RESTRICT"|"CASCADE"|"SET NULL",
actionOnUpdate:"RESTRICT"|"CASCADE"|"SET NULL",
object_id:number
actionOnDelete:"RESTRICT"|"CASCADE"|"SET NULL"|"NO ACTION",
actionOnUpdate:"RESTRICT"|"CASCADE"|"SET NULL"|"NO ACTION",
object_id:number|string
}

View File

@ -13,12 +13,13 @@ import { EntityFileToJson } from "../utils/EntityFileToJson";
var chai = require('chai');
var chaiSubset = require('chai-subset');
import * as ts from "typescript";
import { PostgresDriver } from "../../src/drivers/PostgresDriver";
chai.use(chaiSubset);
describe("integration tests", async function() {
describe("integration tests", async function () {
this.timeout(10000)
this.slow(5000)//compiling created models takes time
let examplesPath = path.resolve(process.cwd(), 'test/integration/examples')
@ -26,57 +27,39 @@ describe("integration tests", async function() {
let dbDrivers: DriverType[] = []
if (process.env.MSSQL_Skip == '0') dbDrivers.push('mssql')
if (process.env.POSTGRES_Skip == '0') dbDrivers.push('postgres')
for (let folder of files) {
describe(folder, async function() {
describe(folder, async function () {
for (let dbDriver of dbDrivers) {
it(dbDriver, async function() {
it(dbDriver, async function () {
let filesOrgPath = path.resolve(examplesPath, folder, 'entity')
let connOpt: ConnectionOptions = {
driver: {
database: process.env.MSSQL_Database,
host: process.env.MSSQL_Host,
password: process.env.MSSQL_Password,
type: 'mssql',
username: process.env.MSSQL_Username,
port: process.env.MSSQL_Port
},
dropSchemaOnConnection: true,
autoSchemaSync: true,
entities: [path.resolve(filesOrgPath, '*.js')],
}
let conn = await createConnection(connOpt)
if (conn.isConnected)
await conn.close()
let driver: AbstractDriver;
driver = new MssqlDriver();
let resultsPath = path.resolve(process.cwd(), `output`)
let engine = new Engine(
driver, {
host: process.env.MSSQL_Host,
port: process.env.MSSQL_Port,
databaseName: process.env.MSSQL_Database,
user: process.env.MSSQL_Username,
password: process.env.MSSQL_Password,
databaseType: 'mssql',
resultsPath: resultsPath
});
fs.removeSync(resultsPath)
let engine: Engine;
switch (dbDriver) {
case 'mssql':
engine = await createMSSQLModels(filesOrgPath, resultsPath)
break;
case 'postgres':
engine = await createPostgresModels(filesOrgPath, resultsPath)
break;
default:
console.log(`Unknown engine type`);
engine=<Engine>{}
break;
}
let result = await engine.createModelFromDatabase()
let filesGenPath = path.resolve(resultsPath, 'entities')
let filesOrg = fs.readdirSync(filesOrgPath).filter(function(this, val, ind, arr) { return val.toString().endsWith('.ts') })
let filesGen = fs.readdirSync(filesGenPath).filter(function(this, val, ind, arr) { return val.toString().endsWith('.ts') })
let filesOrg = fs.readdirSync(filesOrgPath).filter(function (this, val, ind, arr) { return val.toString().endsWith('.ts') })
let filesGen = fs.readdirSync(filesGenPath).filter(function (this, val, ind, arr) { return val.toString().endsWith('.ts') })
expect(filesOrg, 'Errors detected in model comparision').to.be.deep.equal(filesGen)
@ -107,6 +90,74 @@ describe("integration tests", async function() {
}
})
async function createMSSQLModels(filesOrgPath: string, resultsPath: string): Promise<Engine> {
let connOpt: ConnectionOptions = {
driver: {
database: process.env.MSSQL_Database,
host: process.env.MSSQL_Host,
password: process.env.MSSQL_Password,
type: 'mssql',
username: process.env.MSSQL_Username,
port: process.env.MSSQL_Port
},
dropSchemaOnConnection: true,
autoSchemaSync: true,
entities: [path.resolve(filesOrgPath, '*.js')],
}
let conn = await createConnection(connOpt)
if (conn.isConnected)
await conn.close()
let driver: AbstractDriver;
driver = new MssqlDriver();
let engine = new Engine(
driver, {
host: process.env.MSSQL_Host,
port: process.env.MSSQL_Port,
databaseName: process.env.MSSQL_Database,
user: process.env.MSSQL_Username,
password: process.env.MSSQL_Password,
databaseType: 'mssql',
resultsPath: resultsPath
});
return engine;
}
async function createPostgresModels(filesOrgPath: string, resultsPath: string): Promise<Engine> {
let connOpt: ConnectionOptions = {
driver: {
database: process.env.POSTGRES_Database,
host: process.env.POSTGRES_Host,
password: process.env.POSTGRES_Password,
type: 'postgres',
username: process.env.POSTGRES_Username,
port: process.env.POSTGRES_Port
},
dropSchemaOnConnection: true,
autoSchemaSync: true,
entities: [path.resolve(filesOrgPath, '*.js')],
}
let conn = await createConnection(connOpt)
if (conn.isConnected)
await conn.close()
let driver: AbstractDriver;
driver = new PostgresDriver();
let engine = new Engine(
driver, {
host: process.env.POSTGRES_Host,
port: process.env.POSTGRES_Port,
databaseName: process.env.POSTGRES_Database,
user: process.env.POSTGRES_Username,
password: process.env.POSTGRES_Password,
databaseType: 'postgres',
resultsPath: resultsPath
});
return engine;
}
function compileTsFiles(fileNames: string[], options: ts.CompilerOptions): boolean {
let program = ts.createProgram(fileNames, options);
let emitResult = program.emit();
@ -114,9 +165,9 @@ function compileTsFiles(fileNames: string[], options: ts.CompilerOptions): boole
let allDiagnostics = ts.getPreEmitDiagnostics(program).concat(emitResult.diagnostics);
allDiagnostics.forEach(diagnostic => {
let lineAndCharacter = diagnostic.file.getLineAndCharacterOfPosition(diagnostic.start);
let lineAndCharacter = diagnostic.file!.getLineAndCharacterOfPosition(diagnostic.start!);
let message = ts.flattenDiagnosticMessageText(diagnostic.messageText, '\n');
console.log(`${diagnostic.file.fileName} (${lineAndCharacter.line + 1},${lineAndCharacter.character + 1}): ${message}`);
console.log(`${diagnostic.file!.fileName} (${lineAndCharacter.line + 1},${lineAndCharacter.character + 1}): ${message}`);
compileErrors = true;
});