Files
go-migrate/cmd/go_migrate/process.go

117 lines
3.0 KiB
Go

package main
import (
"context"
"database/sql"
"fmt"
"sync"
"time"
"github.com/jackc/pgx/v5/pgxpool"
_ "github.com/microsoft/go-mssqldb"
log "github.com/sirupsen/logrus"
)
// processMigrationJob runs a single table migration job end-to-end.
//
// Pipeline topology (each sender closes its output channel so downstream
// range loops terminate cleanly):
//
//	extractFromMssql    -> transformRowsMssql -> fakeLoader (source col types)
//	extractFromPostgres ------------------------> fakeLoader (target col types)
//
// Both fake loaders run under one WaitGroup; the function returns only once
// both pipelines have fully drained.
func processMigrationJob(sourceDb *sql.DB, targetDb *pgxpool.Pool, job MigrationJob) {
	sourceColTypes, targetColTypes, err := GetColumnTypes(sourceDb, targetDb, job)
	if err != nil {
		// Column metadata is a hard prerequisite for every stage; abort.
		log.Fatal("Unexpected error: ", err)
	}
	logColumnTypes(sourceColTypes, "Source col types")
	logColumnTypes(targetColTypes, "Target col types")

	chRowsExtract := make(chan []UnknownRowValues, QueueSize)
	chRowsTransform := make(chan []UnknownRowValues)
	mssqlContext := context.Background()
	go func() {
		if err := extractFromMssql(mssqlContext, job, sourceColTypes, ChunkSize, sourceDb, chRowsExtract); err != nil {
			log.Error("Unexpected error extracting data from mssql: ", err)
		}
		// Sender closes so transformRowsMssql's range loop ends.
		close(chRowsExtract)
	}()
	go func() {
		transformRowsMssql(sourceColTypes, chRowsExtract, chRowsTransform)
		close(chRowsTransform)
	}()

	var wgFakeLoaders sync.WaitGroup
	wgFakeLoaders.Go(func() {
		fakeLoader(job, sourceColTypes, chRowsTransform)
	})

	chRowsExtractPostgres := make(chan []UnknownRowValues, QueueSize)
	postgresContext := context.Background()
	go func() {
		// NOTE(review): extractFromPostgres receives sourceColTypes while its
		// fakeLoader below uses targetColTypes — confirm this is intentional.
		if err := extractFromPostgres(postgresContext, job, sourceColTypes, ChunkSize, targetDb, chRowsExtractPostgres); err != nil {
			log.Error("Unexpected error extracting data from postgres: ", err)
		}
		close(chRowsExtractPostgres)
	}()
	wgFakeLoaders.Go(func() {
		fakeLoader(job, targetColTypes, chRowsExtractPostgres)
	})

	wgFakeLoaders.Wait()
}
// logColumnTypes emits the given label at info level, followed by one
// info line per column type rendered with "%+v".
func logColumnTypes(columnTypes []ColumnType, label string) {
	log.Info(label)
	for i := range columnTypes {
		log.Infof("%+v", columnTypes[i])
	}
}
// transformRowsMssql rewrites MSSQL-specific column values in each chunk
// received on in, mutating the rows in place, then forwards the chunk to out.
//
// Per-column conversions (keyed on the column's system type):
//   - uniqueidentifier: byte order normalized via mssqlUuidToBigEndian
//   - geometry/geography: WKB wrapped as EWKB with SRID 4326 (WGS 84)
//   - datetime/datetime2: time values normalized via ensureUTC
//
// Values whose dynamic type does not match the expected one ([]byte or
// time.Time) are passed through unchanged. The function returns when in is
// closed; the caller is responsible for closing out.
func transformRowsMssql(columns []ColumnType, in <-chan []UnknownRowValues, out chan<- []UnknownRowValues) {
	for rows := range in {
		log.Debugf("Chunk received, transforming...")
		for _, rowValues := range rows {
			for i, col := range columns {
				value := rowValues[i]
				// switch evaluates SystemType() once per column instead of
				// up to five times in the original if/else-if chain.
				switch col.SystemType() {
				case "uniqueidentifier":
					if b, ok := value.([]byte); ok {
						rowValues[i] = mssqlUuidToBigEndian(b)
					}
				case "geometry", "geography":
					if b, ok := value.([]byte); ok {
						rowValues[i] = wkbToEwkbWithSrid(b, 4326)
					}
				case "datetime", "datetime2":
					if t, ok := value.(time.Time); ok {
						rowValues[i] = ensureUTC(t)
					}
				}
			}
		}
		out <- rows
	}
}
// logSampleRow logs one tagged sample row for the given table: a header
// line with schema, table, and tag, then one line per column showing the
// column name, the value's Go type, and the value itself.
func logSampleRow(job MigrationJob, columns []ColumnType, rowValues UnknownRowValues, tag string) {
	log.Infof("[%s.%s] Sample row: (%s)", job.Schema, job.Table, tag)
	for idx, column := range columns {
		value := rowValues[idx]
		log.Infof("%s (%T): %v", column.Name(), value, value)
	}
}
// fakeLoader drains row chunks from in without writing to any target,
// logging every 100th row of each chunk (including row 0) as a sample.
// It returns once in is closed and fully consumed.
func fakeLoader(job MigrationJob, columns []ColumnType, in <-chan []UnknownRowValues) {
	for rows := range in {
		log.Debugf("Chunk received, loading data into...")
		for rowIdx := range rows {
			if rowIdx%100 != 0 {
				continue
			}
			logSampleRow(job, columns, rows[rowIdx], fmt.Sprintf("row %d", rowIdx))
		}
	}
}