Please help me review this code. I wrote a tool to export data from a database to Excel; it works, but it runs very slowly.

  golang, question

Overview: Given a database type, a database connection string, and a SQL statement (or a file containing SQL), the program runs the query and writes the result set into an Excel file.
Performance: exporting 250,000 rows (5 fields per row) from Oracle using the goracle driver and generating the Excel file with excelize takes 1 minute 33 seconds. The same query in the same environment with Python 3.6 (cx_Oracle driver, pyexcelerate for Excel generation) takes 51 seconds — yes, Go is currently losing to Python here.
Program Description: To improve throughput I used goroutines. One goroutine executes the SQL, builds a []interface{} per row, and pushes each row into a channel; another goroutine continuously receives []interface{} values from the channel and writes them into Excel.
Personal feeling may have problems:
1. Go's database/sql interface yields rows one at a time and relies on reflection for scanning, unlike Python, which can fetch a large batch at once via fetchmany. I wonder whether this accounts for part of the performance gap.
2. When the field type is date type and the field is empty, if isZero judgment is not made, the date zero value output to excel is abnormal (the value is -5XXXXX, which is displayed as # # # # # # # # # # #). Therefore, for each []interface{}, it is necessary to judge whether the type is a date, and if so, whether it is a zero value, which may affect the efficiency. Python does not have this problem

Any Go optimization advice would be appreciated. Thank you.
The code is as follows:

package main

import (
	"flag"
	"fmt"
	"io/ioutil"
	"strconv"
	"strings"
	"time"

	// NOTE(review): the forum paste stripped the third-party import paths
	// (they appeared as `_ ""`); the paths below are reconstructed from the
	// libraries the code visibly uses — confirm against the original project.
	"github.com/360EntSecGroup-Skylar/excelize"
	"github.com/axgle/mahonia"
	"github.com/jmoiron/sqlx"

	_ "github.com/lib/pq"   // postgres driver, registered as "postgres"
	_ "gopkg.in/goracle.v2" // oracle driver, registered as "goracle"
)

func DataGetter(db *sqlx.DB,query string,rowChan chan <- []interface{},columnChan chan <- []string){
    defer db.Close()
    row,err := db.Queryx(query)
    if err != nil{
    defer row.Close()
    columns,err := row.Columns()
    columnChan <- columns
    if err !=nil {
        panic(fmt.Sprint("failed to add sheet:%s",err.Error()))
    for row.Next(){
        r,err := row.SliceScan()
        if err !=nil{
            panic("db row query failed")
        rowChan <- r

func ExcelWriter(sheetHead string,fileName string,rowChan <- chan[]interface{},columnChan <- chan[]string){
    cnt := 2
    sheetcnt := 1
    var r []interface{}
    columns := <- columnChan
    hasNext := true
    excel := excelize.NewFile()
    for hasNext{
        r,hasNext = <- rowChan
        for a := 0;a<len(columns);a++{
            t,ok := r[a].(time.Time)
            if ok{
                if t.IsZero(){
        cnt = cnt + 1
        if cnt >= 100000{
            sheetHead = sheetHead+strconv.Itoa(sheetcnt)
            cnt = 2
            sheetcnt = sheetcnt + 1

func getConn(dbconn string,dbtype string)(db *sqlx.DB){
    if dbtype == "oracle"{
        driver := "goracle"
        return sqlx.MustOpen(driver,strings.Replace(dbconn,":","/",1))
    }else if dbtype == "db2"{
        driver := "cli"
        userPart := strings.Split(dbconn,"@")[0]
        username := strings.Split(userPart,":")[0]
        password := strings.Split(userPart,":")[1]
        dbPart := strings.Split(dbconn,"@")[0]
        dbname := strings.Split(dbPart,"/")[1]
        dbip := strings.Split(strings.Split(dbPart,"/")[0],":")[0]
        dbport := strings.Split(strings.Split(dbPart,"/")[0],":")[1]
        connString := fmt.Sprintf("Driver={IBM DB2 ODBC Driver};Hostname=%s;Port=%s;Protocol=TCPIP;Database=%s;CurrentSchema=%s;UID=%s;PWD=%s;",
        return sqlx.MustOpen(driver,connString)
    }else if dbtype == "postgres"{
        driver := "postgres"
        connString := "postgres://" + dbconn
        return sqlx.MustOpen(driver,connString)
        fmt.Println("dbtype not matched!")

func main() {
    dbconn := flag.String("d","",`Database connect string,use "user:password@ip:port/dbname" for db2 or "user:password@tnsname" for oracle`)
    dbtype := flag.String("t","","Database type:oracle db2 mysql mssql")
    filetype := flag.String("f","xlsx","exported file type:xlsx or txt default:xlsx")
    //xlsx require options
    sheetname := flag.String("h","Sheet1","sheet name: default Sheet1")
    //txt require options
    //charset := flag.String("c","utf-8","charset for exported text file:gbk utf-8 and so on")
    //separator := flag.String("s","/","separator: default:/")
    //sql options
    query := flag.String("q","","sql in one line")
    sqlfile := flag.String("l","","sqlfile")
    filename := flag.String("n",time.Now().Format("20060102150405"),"filename")

    if *dbconn == "" || *dbtype == "" || *filetype == ""{
    if *query == "" && *sqlfile == ""{
    if *sqlfile != "" {
        sqlbyte,err := ioutil.ReadFile(*sqlfile)
        if err != nil{
            panic("read sqlfile failed!")
        utf8 := mahonia.NewEncoder("utf-8")
        *query = utf8.ConvertString(string(sqlbyte))
    if *filetype == "xlsx"{
        rowChan := make(chan []interface{},50000)
        columnsChan := make(chan []string)
        db := getConn(*dbconn,*dbtype)
        go DataGetter(db,*query,rowChan,columnsChan)
        //}else if *filetype == "txt"{
        //    db := getConn(*dbconn,*dbtype)
        //    TextFileExporter(db,*charset,*separator,*filename,*query)


Under the hood, Python's fetchmany also retrieves rows one by one — it merely batches them into a list before returning it to you.