Files
go-migrate/cmd/go_migrate/process.go

106 lines
2.8 KiB
Go

package main
import (
"context"
"database/sql"
"fmt"
"sync"
"github.com/jackc/pgx/v5/pgxpool"
_ "github.com/microsoft/go-mssqldb"
log "github.com/sirupsen/logrus"
)
// processMigrationJob verifies one table's migration: it streams rows out of
// the SQL Server source and then out of the Postgres target, running each
// stream through transformRows (which normalizes values and logs samples).
// The two phases run sequentially; within each phase the extractor runs in
// its own goroutine and feeds the transformer over a buffered channel.
//
// Fatal-exits the process if column metadata cannot be loaded.
func processMigrationJob(sourceDb *sql.DB, targetDb *pgxpool.Pool, job MigrationJob) {
	sourceColTypes, targetColTypes, err := GetColumnTypes(sourceDb, targetDb, job)
	if err != nil {
		log.Fatal("Unexpected error: ", err)
	}
	logColumnTypes(sourceColTypes, "Source col types")
	logColumnTypes(targetColTypes, "Target col types")

	// Phase 1: SQL Server. The extractor goroutine is the sole sender and
	// closes the channel when done (or on error), which ends the transformer.
	chRowsExtract := make(chan []UnknownRowValues, QueueSize)
	mssqlContext := context.Background()
	go func() {
		if err := extractFromMssql(mssqlContext, job, sourceColTypes, ChunkSize, sourceDb, chRowsExtract); err != nil {
			log.Error("Unexpected error extracting data from mssql: ", err)
		}
		close(chRowsExtract)
	}()
	var wgMssqlTransformers sync.WaitGroup
	wgMssqlTransformers.Go(func() {
		transformRows(job, sourceColTypes, "sqlserver", chRowsExtract)
	})
	wgMssqlTransformers.Wait()

	// Phase 2: Postgres. Uses targetColTypes — the original code passed
	// sourceColTypes here, leaving targetColTypes computed but never used.
	chRowsExtractPostgres := make(chan []UnknownRowValues, QueueSize)
	postgresContext := context.Background()
	go func() {
		if err := extractFromPostgres(postgresContext, job, targetColTypes, ChunkSize, targetDb, chRowsExtractPostgres); err != nil {
			log.Error("Unexpected error extracting data from postgres: ", err)
		}
		close(chRowsExtractPostgres)
	}()
	var wgPostgresTransformers sync.WaitGroup
	wgPostgresTransformers.Go(func() {
		transformRows(job, targetColTypes, "postgres", chRowsExtractPostgres)
	})
	wgPostgresTransformers.Wait()
}
// logColumnTypes emits the label at info level, followed by one info line
// per column type (formatted with %+v to include field names).
func logColumnTypes(columnTypes []ColumnType, label string) {
	log.Info(label)
	for i := range columnTypes {
		log.Infof("%+v", columnTypes[i])
	}
}
// transformRows drains chunks of rows from in until the channel is closed,
// normalizing values in place and logging every 100th row of each chunk as
// a sample. Currently the only transformation is rewriting SQL Server
// uniqueidentifier values from their mixed-endian on-disk layout to RFC 4122
// big-endian byte order, so it applies only when driver is "sqlserver".
//
// Fixes over the original: the inner loop shadowed the outer index `i`
// (the `i%100` check read the outer one — easy to misread and fragile under
// edits), and the loop-invariant driver comparison ran once per column.
func transformRows(job MigrationJob, columns []ColumnType, driver string, in <-chan []UnknownRowValues) {
	// The UUID byte-order fix only applies to rows read from SQL Server.
	isMssql := driver == "sqlserver"
	for rows := range in {
		log.Debugf("Chunk received (%s), transforming...", driver)
		for rowIdx, rowValues := range rows {
			if isMssql {
				for colIdx, col := range columns {
					if col.SystemType() == "uniqueidentifier" {
						// Values may arrive as types other than []byte
						// (e.g. nil for NULL); those pass through untouched.
						if b, ok := rowValues[colIdx].([]byte); ok {
							rowValues[colIdx] = mssqlUuidToBigEndian(b)
						}
					}
				}
			}
			if rowIdx%100 == 0 {
				logSampleRow(job, columns, rowValues, fmt.Sprintf("row %d", rowIdx))
			}
		}
	}
}
// logSampleRow logs a tagged "[schema.table] Sample row" header, then one
// "name: value" info line per column of the row.
func logSampleRow(job MigrationJob, columns []ColumnType, rowValues UnknownRowValues, tag string) {
	log.Infof("[%s.%s] Sample row: (%s)", job.Schema, job.Table, tag)
	for idx := range columns {
		log.Infof("%s: %v", columns[idx].Name(), rowValues[idx])
	}
}
// mssqlUuidToBigEndian converts a 16-byte SQL Server uniqueidentifier from
// its mixed-endian layout (first three groups little-endian) to RFC 4122
// big-endian order: the first 4 bytes are reversed, bytes 4-5 and 6-7 are
// each swapped, and the final 8 bytes are copied through unchanged.
// Inputs that are not exactly 16 bytes are returned as-is.
func mssqlUuidToBigEndian(mssqlUuid []byte) []byte {
	if len(mssqlUuid) != 16 {
		return mssqlUuid
	}
	pgUuid := make([]byte, 16)
	copy(pgUuid, mssqlUuid)
	// Reverse the 4-byte first group in place.
	for lo, hi := 0, 3; lo < hi; lo, hi = lo+1, hi-1 {
		pgUuid[lo], pgUuid[hi] = pgUuid[hi], pgUuid[lo]
	}
	// Swap each of the two 2-byte groups.
	pgUuid[4], pgUuid[5] = pgUuid[5], pgUuid[4]
	pgUuid[6], pgUuid[7] = pgUuid[7], pgUuid[6]
	return pgUuid
}