Skip to content

Commit 7e4b2ae

Browse files
committed
Merge branch 'release/4.0.1'
2 parents 562c0a0 + b3c54ca commit 7e4b2ae

File tree

9 files changed

+113
-122
lines changed

9 files changed

+113
-122
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -89,7 +89,7 @@ from MySQL to PostgreSQL as easy and smooth as possible.</p>
8989
<b>Note:</b> "logs_directory" will be created during script execution.</p>
9090

9191
<h3>VERSION</h3>
92-
<p>Current version is 4.0.0<br />
92+
<p>Current version is 4.0.1<br />
9393
(major version . improvements . bug fixes)</p>
9494

9595
<h3>KNOWN ISSUES</h3>

config/config.json

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -13,7 +13,7 @@
1313
"host" : "localhost",
1414
"port" : 3306,
1515
"database" : "test_db",
16-
"charset" : "UTF8",
16+
"charset" : "utf8mb4",
1717
"user" : "root",
1818
"password" : "0123456789"
1919
},
@@ -82,7 +82,7 @@
8282
"exclude_tables": [],
8383

8484
"include_tables_description": [
85-
"List (Array) of tables, that will not be migrated.",
85+
"List (Array) of tables, that will be migrated.",
8686
"By default, nmig will migrate all tables."
8787
],
8888
"include_tables": [],

config/test_config.json

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -11,7 +11,7 @@
1111
"source" : {
1212
"host" : "localhost",
1313
"port" : 3306,
14-
"charset" : "UTF8",
14+
"charset" : "utf8mb4",
1515
"database" : "nmig_test_db",
1616
"user" : "root",
1717
"password" : "0123456789"

package-lock.json

Lines changed: 52 additions & 44 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

package.json

Lines changed: 8 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
{
22
"name": "nmig",
3-
"version": "4.0.0",
3+
"version": "4.0.1",
44
"description": "The database migration app",
55
"author": "Anatoly Khaytovich<[email protected]>",
66
"license": "GPL-3.0",
@@ -12,17 +12,17 @@
1212
"node": ">=8.0.0"
1313
},
1414
"dependencies": {
15-
"mysql": "^2.16.0",
16-
"pg": "^7.9.0",
17-
"pg-copy-streams": "^2.2.0"
15+
"mysql": "^2.17.1",
16+
"pg": "^7.12.0",
17+
"pg-copy-streams": "^2.2.2"
1818
},
1919
"devDependencies": {
20-
"@types/mysql": "^2.15.5",
21-
"@types/node": "^11.13.4",
20+
"@types/mysql": "^2.15.6",
21+
"@types/node": "^12.6.8",
2222
"@types/pg": "^7.4.14",
2323
"@types/tape": "^4.2.33",
24-
"tape": "^4.10.1",
25-
"typescript": "^3.4.3"
24+
"tape": "^4.11.0",
25+
"typescript": "^3.5.3"
2626
},
2727
"scripts": {
2828
"build": "tsc",

src/DataPipeManager.ts

Lines changed: 4 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -61,7 +61,7 @@ function getSmallestDataChunkSizeInMb(conversion: Conversion): number {
6161
/**
6262
* Creates an array of indexes, that point to data chunks, that will be processed during current COPY operation.
6363
*/
64-
function fillBandwidth(conversion: Conversion): number[] {
64+
async function fillBandwidth(conversion: Conversion): Promise<number[]> {
6565
const dataChunkIndexes: number[] = [];
6666

6767
// Loop through the data pool from the beginning to the end.
@@ -113,7 +113,7 @@ function fillBandwidth(conversion: Conversion): number[] {
113113

114114
if (firstUnprocessedChunkIndex === -1) {
115115
const msg: string = 'Something went wrong with DataPipeManager.';
116-
log(conversion, msg, undefined, true);
116+
await generateError(conversion, msg);
117117
process.exit();
118118
}
119119

@@ -135,7 +135,8 @@ async function pipeData(conversion: Conversion, dataLoaderPath: string, options:
135135
return processConstraints(conversion);
136136
}
137137

138-
const chunksToLoad: any[] = fillBandwidth(conversion).map((index: number) => conversion._dataPool[index]);
138+
const chunksIndexes: number[] = await fillBandwidth(conversion);
139+
const chunksToLoad: any[] = chunksIndexes.map((index: number) => conversion._dataPool[index]);
139140
const loaderProcess: ChildProcess = fork(dataLoaderPath, options);
140141

141142
loaderProcess.on('message', async (signal: any) => {

src/FsOps.ts

Lines changed: 23 additions & 17 deletions
Original file line number | Diff line number | Diff line change
@@ -29,7 +29,7 @@ export function generateError(conversion: Conversion, message: string, sql: stri
2929
return new Promise<void>(resolve => {
3030
message += `\n\n\tSQL: ${sql}\n\n`;
3131
const buffer: Buffer = Buffer.from(message, conversion._encoding);
32-
log(conversion, message, undefined, true);
32+
log(conversion, message, undefined);
3333

3434
fs.open(conversion._errorLogsPath, 'a', conversion._0777, (error: Error, fd: number) => {
3535
if (error) {
@@ -48,13 +48,10 @@ export function generateError(conversion: Conversion, message: string, sql: stri
4848
* Writes given log to the "/all.log" file.
4949
* If necessary, writes given log to the "/{tableName}.log" file.
5050
*/
51-
export function log(conversion: Conversion, log: string | NodeJS.ErrnoException, tableLogPath?: string, isErrorLog?: boolean): void {
51+
export function log(conversion: Conversion, log: string | NodeJS.ErrnoException, tableLogPath?: string): void {
52+
console.log(log);
5253
const buffer: Buffer = Buffer.from(`${ log }\n\n`, conversion._encoding);
5354

54-
if (!isErrorLog) {
55-
console.log(log);
56-
}
57-
5855
fs.open(conversion._allLogsPath, 'a', conversion._0777, (error: Error, fd: number) => {
5956
if (!error) {
6057
fs.write(fd, buffer, 0, buffer.length, null, () => {
@@ -124,28 +121,37 @@ export function readExtraConfig(config: any, baseDir: string): Promise<any> {
124121
/**
125122
* Creates logs directory.
126123
*/
127-
export function createLogsDirectory(conversion: Conversion): Promise<Conversion> {
128-
return new Promise<Conversion>(resolve => {
129-
const logTitle: string = 'FsOps::createLogsDirectory';
130-
console.log(`\t--[${ logTitle }] Creating logs directory...`);
124+
export async function createLogsDirectory(conversion: Conversion): Promise<Conversion> {
125+
const logTitle: string = 'FsOps::createLogsDirectory';
126+
await createDirectory(conversion, conversion._logsDirPath, logTitle);
127+
await createDirectory(conversion, conversion._notCreatedViewsPath, logTitle);
128+
return conversion;
129+
}
130+
131+
/**
132+
* Creates a directory at the specified path.
133+
*/
134+
function createDirectory(conversion: Conversion, directoryPath: string, logTitle: string): Promise<void> {
135+
return new Promise<void>(resolve => {
136+
console.log(`\t--[${ logTitle }] Creating directory ${ directoryPath }...`);
131137

132-
fs.stat(conversion._logsDirPath, (directoryDoesNotExist: Error, stat: fs.Stats) => {
138+
fs.stat(directoryPath, (directoryDoesNotExist: Error, stat: fs.Stats) => {
133139
if (directoryDoesNotExist) {
134-
fs.mkdir(conversion._logsDirPath, conversion._0777, e => {
140+
fs.mkdir(directoryPath, conversion._0777, e => {
135141
if (e) {
136-
console.log(`\t--[${ logTitle }] Cannot perform a migration due to impossibility to create "logs_directory": ${ conversion._logsDirPath }`);
142+
console.log(`\t--[${ logTitle }] Cannot perform a migration due to impossibility to create directory: ${ directoryPath }`);
137143
process.exit();
138144
} else {
139-
log(conversion, '\t--[logTitle] Logs directory is created...');
140-
resolve(conversion);
145+
log(conversion, `\t--[${ logTitle }] Directory ${ directoryPath } is created...`);
146+
resolve();
141147
}
142148
});
143149
} else if (!stat.isDirectory()) {
144150
console.log(`\t--[${ logTitle }] Cannot perform a migration due to unexpected error`);
145151
process.exit();
146152
} else {
147-
log(conversion, `\t--[${ logTitle }] Logs directory already exists...`);
148-
resolve(conversion);
153+
log(conversion, `\t--[${ logTitle }] Directory ${ directoryPath } already exists...`);
154+
resolve();
149155
}
150156
});
151157
});

0 commit comments

Comments (0)