feat: add MSSQL UUID transformation and update migration process for channel handling

This commit is contained in:
2026-04-06 21:53:09 -05:00
parent 8de4838e00
commit f305ddec0b
2 changed files with 42 additions and 26 deletions

View File

@@ -0,0 +1,14 @@
package main
// mssqlUuidToBigEndian converts a 16-byte UUID from SQL Server's wire
// byte order (first three groups stored little-endian) into the
// big-endian RFC 4122 layout expected by PostgreSQL. Inputs that are
// not exactly 16 bytes long are returned unchanged. The input slice is
// never mutated.
func mssqlUuidToBigEndian(mssqlUuid []byte) []byte {
	if len(mssqlUuid) != 16 {
		return mssqlUuid
	}
	pgUuid := make([]byte, 16)
	copy(pgUuid, mssqlUuid)
	// Reverse the first three groups (4, 2 and 2 bytes); the trailing
	// 8 bytes are already big-endian and were copied verbatim above.
	for _, span := range [][2]int{{0, 3}, {4, 5}, {6, 7}} {
		for lo, hi := span[0], span[1]; lo < hi; lo, hi = lo+1, hi-1 {
			pgUuid[lo], pgUuid[hi] = pgUuid[hi], pgUuid[lo]
		}
	}
	return pgUuid
}

View File

@@ -22,6 +22,7 @@ func processMigrationJob(sourceDb *sql.DB, targetDb *pgxpool.Pool, job Migration
 	logColumnTypes(targetColTypes, "Target col types")
 	chRowsExtract := make(chan []UnknownRowValues, QueueSize)
+	chRowsTransform := make(chan []UnknownRowValues)
 	mssqlContext := context.Background()
 	go func() {
@@ -31,11 +32,16 @@ func processMigrationJob(sourceDb *sql.DB, targetDb *pgxpool.Pool, job Migration
 		close(chRowsExtract)
 	}()
-	var wgMssqlTransformers sync.WaitGroup
-	wgMssqlTransformers.Go(func() {
-		transformRows(job, sourceColTypes, "sqlserver", chRowsExtract)
+	go func() {
+		transformRowsMssql(sourceColTypes, chRowsExtract, chRowsTransform)
+		close(chRowsTransform)
+	}()
+
+	var wgFakeLoaders sync.WaitGroup
+	wgFakeLoaders.Go(func() {
+		fakeLoader(job, sourceColTypes, chRowsTransform)
 	})
-	wgMssqlTransformers.Wait()

 	chRowsExtractPostgres := make(chan []UnknownRowValues, QueueSize)
 	postgresContext := context.Background()
@@ -47,12 +53,11 @@ func processMigrationJob(sourceDb *sql.DB, targetDb *pgxpool.Pool, job Migration
 		close(chRowsExtractPostgres)
 	}()
-	var wgPostgresTransformers sync.WaitGroup
-	wgPostgresTransformers.Go(func() {
-		transformRows(job, sourceColTypes, "postgres", chRowsExtractPostgres)
+	wgFakeLoaders.Go(func() {
+		fakeLoader(job, targetColTypes, chRowsExtractPostgres)
 	})
-	wgPostgresTransformers.Wait()
+	wgFakeLoaders.Wait()
 }

 func logColumnTypes(columnTypes []ColumnType, label string) {
@@ -63,24 +68,22 @@ func logColumnTypes(columnTypes []ColumnType, label string) {
 	}
 }

-func transformRows(job MigrationJob, columns []ColumnType, driver string, in <-chan []UnknownRowValues) {
+func transformRowsMssql(columns []ColumnType, in <-chan []UnknownRowValues, out chan<- []UnknownRowValues) {
 	for rows := range in {
-		log.Debugf("Chunk received (%s), transforming...", driver)
-		for i, rowValues := range rows {
+		log.Debugf("Chunk received, transforming...")
+		for _, rowValues := range rows {
 			for i, col := range columns {
 				value := rowValues[i]
-				if col.SystemType() == "uniqueidentifier" && driver == "sqlserver" {
+				if col.SystemType() == "uniqueidentifier" {
 					if b, ok := value.([]byte); ok {
 						rowValues[i] = mssqlUuidToBigEndian(b)
 					}
 				}
 			}
-			if i%100 == 0 {
-				logSampleRow(job, columns, rowValues, fmt.Sprintf("row %d", i))
-			}
 		}
+		out <- rows
 	}
 }
@@ -91,15 +94,14 @@ func logSampleRow(job MigrationJob, columns []ColumnType, rowValues UnknownRowVa
 	}
 }

-func mssqlUuidToBigEndian(mssqlUuid []byte) []byte {
-	if len(mssqlUuid) != 16 {
-		return mssqlUuid
-	}
-	pgUuid := make([]byte, 16)
-	pgUuid[0], pgUuid[1], pgUuid[2], pgUuid[3] = mssqlUuid[3], mssqlUuid[2], mssqlUuid[1], mssqlUuid[0]
-	pgUuid[4], pgUuid[5] = mssqlUuid[5], mssqlUuid[4]
-	pgUuid[6], pgUuid[7] = mssqlUuid[7], mssqlUuid[6]
-	copy(pgUuid[8:], mssqlUuid[8:])
-	return pgUuid
+func fakeLoader(job MigrationJob, columns []ColumnType, in <-chan []UnknownRowValues) {
+	for rows := range in {
+		log.Debugf("Chunk received, loading data into...")
+		for i, rowValues := range rows {
+			if i%100 == 0 {
+				logSampleRow(job, columns, rowValues, fmt.Sprintf("row %d", i))
+			}
+		}
+	}
 }