pq: remaining connection slots are reserved for non-replication superuser and rds_superuser connections

Time: 2019-04-11 17:11:30

Tags: postgresql go

I keep getting this error:

pq: remaining connection slots are reserved for non-replication superuser and rds_superuser connections

I have read many articles, but they only offer solutions on the database side, not on the Go side. I cannot change the database configuration; max_connections is already set to 3500.
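To see whether this importer is what exhausts the slots, the server can be checked while the import runs. Below is a minimal diagnostic sketch, assuming the lib/pq driver; the connection string and the main wrapper are placeholders:

package main

import (
    "database/sql"
    "fmt"
    "log"

    _ "github.com/lib/pq"
)

func main() {
    // Placeholder DSN; substitute the real credentials of the target database.
    db, err := sql.Open("postgres", "user=... password=... dbname=... host=... port=5432")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    var open int
    // Counts server backends currently held by this database user.
    err = db.QueryRow(`SELECT count(*) FROM pg_stat_activity WHERE usename = current_user`).Scan(&open)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println("connections currently held by this user:", open)
}

If that count climbs steadily toward the limit while the import is running, connections are being opened faster than they are closed.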

The process is: I fetch data by calling an API and then store the response data directly into someone else's database, rather than going through an API.

I've tried adding time.Sleep, reusing and pooling the HTTP client, and setting a custom Transport, but the problem doesn't seem to be related to the HTTP connections.

* I have already checked that I close response.Body.

GET:

client := &http.Client{
    Transport: &http.Transport{
        MaxIdleConns:        100,
        MaxIdleConnsPerHost: 100,
    },
    Timeout: 30 * time.Second,
}

req, err := http.NewRequest("GET", magento_url, nil)
if err != nil {
    log.Fatalf("Couldn't build the request. %+v", err)
}

response, err := client.Do(req)
if err != nil {
    log.Fatalf("The HTTP request failed with error %s", err)
}
defer response.Body.Close()

body, err := ioutil.ReadAll(response.Body)
if err != nil {
    log.Fatalf("Couldn't read response body. %+v", err)
}
fmt.Println("Response Body:", string(body))

countriesMap := make(map[string]interface{})

err = json.Unmarshal(body, &countriesMap) // body is already a []byte
if err != nil {
    panic(err.Error())
}

for key, val := range countriesMap {
    if key == "items" {
        switch concreteVal := val.(type) {
        case []interface{}:
            fmt.Println(key)
            parseArray(val.([]interface{}))
        default:
            fmt.Println(key, ":", concreteVal)
        }
    }
}

POST:

func parseArray(anArray []interface{}) {
    arrayData := make(map[string]interface{})

    for i, val := range anArray {
        switch concreteVal := val.(type) {
        case map[string]interface{}:
            fmt.Println("Index::", i)

            for key, valLast := range val.(map[string]interface{}) {
                if key == "coupon_code" {
                    arrayData["coupon_code"] = valLast
                } else if key == "coupon_description" {
                    arrayData["coupon_description"] = valLast
                }
            }
        default:
            fmt.Println("Index", i, ":", concreteVal)
        }

        // A new *sql.DB (connection pool) is opened for every element, and the
        // deferred Close does not run until parseArray returns.
        dm_connection := []string{"user=", dm_username, " password=", dm_password, " dbname=", dm_database, " port=", dm_port, " host=", dm_host}
        db, err := sql.Open("postgres", strings.Join(dm_connection, ""))
        if err != nil {
            panic(err.Error())
        }
        defer db.Close()

        var dataField ApiData

        checkingData := `SELECT coupon_code FROM discount WHERE coupon_code = $1`
        err = db.QueryRow(checkingData, arrayData["coupon_code"]).Scan(&dataField.Coupon_code)
        if err != nil {
            if err == sql.ErrNoRows {
                // INSERT DM: the coupon does not exist yet
                InsertDM(db, arrayData)
            } else {
                fmt.Println(err.Error())
            }
        }
    }
}

func InsertDM(db *sql.DB, arrayData map[string]interface{}) {
    insertStatement := `INSERT INTO discount (coupon_code,coupon_description,created_at) VALUES ($1,$2,$3)`
    _, errInsertDM := db.Exec(insertStatement, arrayData["coupon_code"], arrayData["coupon_description"], arrayData["timestamp"])

    // if there is an error inserting, handle it
    if errInsertDM != nil {
        panic(errInsertDM.Error())
    }

    insertedData++
}

I expect it to be able to process 15,000 records, but the error always shows up at around 3,000 records.
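For reference, database/sql treats a *sql.DB as a connection pool that is meant to be opened once and shared. A minimal sketch of that pattern, assuming the same lib/pq driver; runImport and the DSN values are placeholders:

package main

import (
    "database/sql"
    "log"
    "time"

    _ "github.com/lib/pq"
)

func main() {
    // Placeholder DSN; real credentials come from configuration.
    db, err := sql.Open("postgres", "user=... password=... dbname=... host=... port=5432")
    if err != nil {
        log.Fatal(err)
    }
    defer db.Close()

    // Bound how many Postgres connections this process can ever hold,
    // regardless of how many records it processes.
    db.SetMaxOpenConns(10)
    db.SetMaxIdleConns(10)
    db.SetConnMaxLifetime(5 * time.Minute)

    runImport(db)
}

// runImport stands in for the API-fetching and row-inserting code;
// every QueryRow and Exec reuses the single shared pool.
func runImport(db *sql.DB) {
    // db.QueryRow(...) / db.Exec(...) for each record goes here.
}

With one shared, bounded pool, processing 3,000 or 15,000 records uses the same limited number of server connections.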

0 Answers:

No answers yet.