diff --git a/src/driver/sqlite-abstract/AbstractSqliteDriver.ts b/src/driver/sqlite-abstract/AbstractSqliteDriver.ts
index 09cb044e17..dc406c7bb7 100644
--- a/src/driver/sqlite-abstract/AbstractSqliteDriver.ts
+++ b/src/driver/sqlite-abstract/AbstractSqliteDriver.ts
@@ -130,12 +130,31 @@ export abstract class AbstractSqliteDriver implements Driver {
     /**
      * Gets list of column data types that support precision by a driver.
      */
-    withPrecisionColumnTypes: ColumnType[] = [];
+    withPrecisionColumnTypes: ColumnType[] = [
+        "real",
+        "double",
+        "double precision",
+        "float",
+        "real",
+        "numeric",
+        "decimal",
+        "date",
+        "time",
+        "datetime"
+    ];

     /**
      * Gets list of column data types that support scale by a driver.
      */
-    withScaleColumnTypes: ColumnType[] = [];
+    withScaleColumnTypes: ColumnType[] = [
+        "real",
+        "double",
+        "double precision",
+        "float",
+        "real",
+        "numeric",
+        "decimal",
+    ];

     /**
      * Orm has special columns and we need to know what database column types should be for those types.
diff --git a/src/driver/sqlite-abstract/AbstractSqliteQueryRunner.ts b/src/driver/sqlite-abstract/AbstractSqliteQueryRunner.ts
index c361695c2c..995b22d785 100644
--- a/src/driver/sqlite-abstract/AbstractSqliteQueryRunner.ts
+++ b/src/driver/sqlite-abstract/AbstractSqliteQueryRunner.ts
@@ -825,17 +825,31 @@ export abstract class AbstractSqliteQueryRunner extends BaseQueryRunner implemen
             }
         }

-        // parse datatype and attempt to retrieve length
+        // parse datatype and attempt to retrieve length, precision and scale
         let pos = tableColumn.type.indexOf("(");
         if (pos !== -1) {
-            let dataType = tableColumn.type.substr(0, pos);
+            const fullType = tableColumn.type;
+            let dataType = fullType.substr(0, pos);
             if (!!this.driver.withLengthColumnTypes.find(col => col === dataType)) {
-                let len = parseInt(tableColumn.type.substring(pos + 1, tableColumn.type.length - 1));
+                let len = parseInt(fullType.substring(pos + 1, fullType.length - 1));
                 if (len) {
                     tableColumn.length = len.toString();
                     tableColumn.type = dataType; // remove the length part from the datatype
                 }
             }
+            if (!!this.driver.withPrecisionColumnTypes.find(col => col === dataType)) {
+                const re = new RegExp(`^${dataType}\\((\\d+),?\\s?(\\d+)?\\)`);
+                const matches = fullType.match(re);
+                if (matches && matches[1]) {
+                    tableColumn.precision = +matches[1];
+                }
+                if (!!this.driver.withScaleColumnTypes.find(col => col === dataType)) {
+                    if (matches && matches[2]) {
+                        tableColumn.scale = +matches[2];
+                    }
+                }
+                tableColumn.type = dataType; // remove the precision/scale part from the datatype
+            }
         }

         return tableColumn;
diff --git a/test/github-issues/6636/entity/Test.ts b/test/github-issues/6636/entity/Test.ts
new file mode 100644
index 0000000000..086363bbbc
--- /dev/null
+++ b/test/github-issues/6636/entity/Test.ts
@@ -0,0 +1,15 @@
+import { Column, Entity, PrimaryColumn } from "../../../../src";
+
+@Entity()
+export class Test {
+
+    @PrimaryColumn()
+    id: number;
+
+    @Column({ nullable: true, precision: 6 })
+    startedAt?: Date;
+
+    @Column({ type: 'decimal', precision: 5, scale: 2 })
+    value: number;
+
+}
diff --git a/test/github-issues/6636/issue-6636.ts b/test/github-issues/6636/issue-6636.ts
new file mode 100644
index 0000000000..841f7c4a5a
--- /dev/null
+++ b/test/github-issues/6636/issue-6636.ts
@@ -0,0 +1,25 @@
+import { Connection } from "../../../src";
+import { closeTestingConnections, createTestingConnections, reloadTestingDatabases } from "../../utils/test-utils";
+import { Test } from "./entity/Test";
+import { expect } from "chai";
+
+describe("github issues > #6636 migration issues with scale & precision", () => {
+
+    let connections: Connection[];
+    before(async () => connections = await createTestingConnections({
+        entities: [Test],
+        enabledDrivers: ["sqljs", "sqlite", "better-sqlite3"],
+    }));
+    beforeEach(() => reloadTestingDatabases(connections));
+    after(() => closeTestingConnections(connections));
+
+    it("should not create migrations columns with precision", async () => {
+        await Promise.all(connections.map(async (connection) => {
+            const sqlInMemory = await connection.driver.createSchemaBuilder().log();
+            expect(sqlInMemory.upQueries).to.eql([]);
+            expect(sqlInMemory.downQueries).to.eql([]);
+        }
+        ))
+    });
+
+});
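
For illustration only (not part of the patch): the precision/scale parsing added to AbstractSqliteQueryRunner above can be exercised in isolation with the same regular expression. The helper name parseTypeMetadata and the sample inputs below are hypothetical, a minimal sketch of how a SQLite column type string such as "decimal(5,2)" is split into base type, precision and scale.

// Minimal standalone sketch of the type-string parsing used in the patch above.
// `parseTypeMetadata` is a hypothetical name; it is not part of TypeORM's API.
function parseTypeMetadata(fullType: string): { type: string; precision?: number; scale?: number } {
    const pos = fullType.indexOf("(");
    if (pos === -1) return { type: fullType };

    const dataType = fullType.substring(0, pos);
    // Same pattern as in the patch: "<type>(<precision>[, <scale>])"
    const re = new RegExp(`^${dataType}\\((\\d+),?\\s?(\\d+)?\\)`);
    const matches = fullType.match(re);

    const result: { type: string; precision?: number; scale?: number } = { type: dataType };
    if (matches && matches[1]) result.precision = +matches[1];
    if (matches && matches[2]) result.scale = +matches[2];
    return result;
}

// "decimal(5,2)" -> { type: "decimal", precision: 5, scale: 2 }
// "datetime(6)"  -> { type: "datetime", precision: 6 }
console.log(parseTypeMetadata("decimal(5,2)"));
console.log(parseTypeMetadata("datetime(6)"));

Because the parsed metadata now matches what the entity metadata declares (e.g. precision: 5, scale: 2 on the decimal column in the test entity), the schema builder no longer emits spurious ALTER statements, which is what the test above asserts via the empty upQueries/downQueries arrays.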