How to import data from csv file into postegrsql using go? - postgresql

I have a docker container with *.csv files in it. I want to import data from that files into my postgresql database. The tables creates from schema.sql file.
I need to import from 3 different csv files data into 3 different tables in my db.
I tried to make it with db.Exec('COPY <table_name> FROM <path> DELIMITER ',' CSV HEADER; but it didn't work (error: cannot open file, or file doesn't exist)
So i make the following function and it works fine, but i think it is not a good solution.
func AddSomeDataIntoTable(tableName, path string, fieldsQty int) error {
file, err := os.Open(path)
if err != nil {
return err
}
defer file.Close()
reader := csv.NewReader(file)
if _, err := reader.Read(); err == io.EOF {
return fmt.Errorf("error: empty file")
}
//loop of reading
for i := 0; ; i++ {
record, err := reader.Read()
if err != nil {
if err != io.EOF {
return fmt.Errorf("Error reading file: %w", err)
}
if i == 0 {
return fmt.Errorf("Malformed csv file: there's only headers and no values")
} else {
log.Print("end of file")
//end of the file
return nil
}
}
//malformed csv handling
if len(record) != fieldsQty {
return fmt.Errorf("Malformed csv file: wrong number of fields")
}
for _, v := range record {
if v == "" {
return fmt.Errorf("malformed csv file: empty fields")
}
}
_, err = uuid.Parse(record[0])
if err != nil {
return fmt.Errorf("malformed id, should be a uuid: %w", err)
}
switch tableName {
case "<table_name1>":
_, err = db.Pdb.Exec(`INSERT INTO <table_name1> (field1, field2, field3, field4, field5) VALUES ($1, $2, $3, $4, $5)`,
record[0], record[1], record[2], record[3], record[4])
if err != nil {
return fmt.Errorf("internal db problems: %w", err)
}
case "<table_name2":
_, err = db.Pdb.Exec(`INSERT INTO table_name2 (field1, field2) VALUES ($1, $2)`,
record[0], record[1])
if err != nil {
return fmt.Errorf("internal db problems: %w", err)
}
case "<table_name3>":
_, err = db.Pdb.Exec(`INSERT INTO table_name3 (field1, field2) VALUES ($1, $2)`,
record[0], record[1])
if err != nil {
return fmt.Errorf("internal db problems: %w", err)
}
default:
return fmt.Errorf("cannot find such table")
}
}
return nil
}
May be someone can advise me any other better solution?

Related

Can i use Query() func twice in pgx Golang lib?

I use query to Find All DB data, and one of tables need to be array, so i integrated the loop.
EveryTime errors is conn is busy.
`
for rows.Next() {
var ord Order
err = rows.Scan(
and after one more Query() which is uses to find all Item with ord.UID. So the question is, what is wrong in my code? And how to use this. Here is my FindAll Func:
rows, err := r.client.Query(ctx, q)
if err != nil {
return nil, err
}
defer rows.Close()
orders := make([]Order, 0)
for rows.Next() {
var ord Order
err = rows.Scan(
&ord.OrderUID,
&ord.TrackNumber,
&ord.Entry,
&ord.Delivery.Name,
&ord.Delivery.Phone,
&ord.Delivery.Zip,
&ord.Delivery.City,
&ord.Delivery.Address,
&ord.Delivery.Region,
&ord.Delivery.Email,
&ord.Payment.Transaction,
&ord.Payment.RequestID,
&ord.Payment.Currency,
&ord.Payment.Provider,
&ord.Payment.Amount,
&ord.Payment.PaymentDT,
&ord.Payment.Bank,
&ord.Payment.DeliveryCost,
&ord.Payment.GoodsTotal,
&ord.Payment.CustomFee,
&ord.Locale,
&ord.InternalSignature,
&ord.CustomerID,
&ord.DeliveryService,
&ord.ShardKey,
&ord.SmID,
&ord.DateCreated,
&ord.OofShard,
)
if err != nil {
return nil, err
}
iq := ` ... `
itemRows, err := r.client.Query(ctx, iq, ord.OrderUID)
if err != nil {
return nil, err
}
items := make([]item.Item, 0)
for itemRows.Next() {
var item item.Item
err = itemRows.Scan(
&item.ID,
&item.ChrtID,
&item.TrackNumber,
&item.Price,
&item.Rid,
&item.Name,
&item.Sale,
&item.Size,
&item.TotalPrice,
&item.NmID,
&item.Brand,
&item.Status,
)
if err != nil {
return nil, err
}
items = append(items, item)
}
ord.Items = items
orders = append(orders, ord)
}
`
I tried to rows.Close, but then i Can't use row anymore.

Receiving error(*errors.errorString) *{s: "pq: unexpected DataRow in simple query execution"}

The error
(*errors.errorString) *{s: "pq: unexpected DataRow in simple query execution"}
appears after the line with the commentary. Didn't find any solution online. Since stackoverflow asks for more details, this is an update query that is supposed to update a todo and a list of subtasks in the database. The exact error is in the question topic. I post the complete code for the function that returns the error.
func (t *TodoTable) UpdateTodo(ctx context.Context, todo *Todo, t_id int) error {
tx, err := t.sqlxdb.BeginTxx(ctx, &sql.TxOptions{})
if err != nil {
return err
}
rollback_err := func(err error) error {
if err2 := tx.Rollback(); err2 != nil {
return fmt.Errorf("%v; %v", err, err2)
}
return err
}
row := tx.QueryRowxContext(ctx, "UPDATE todos SET todo_name=$1, deadline=$2, updated_at=$3 WHERE todo_id=$4 returning todo_id", todo.TodoName, todo.Deadline, todo.UpdatedAt, t_id)
if row.Err() != nil {
return rollback_err(err)
}
var subs_ids []int
// Getting subs ids from database
query := fmt.Sprintf("SELECT sub_id FROM subs WHERE todo_id=%d", t_id)
// THE ERROR COMES AFTER EXECUTING THE LINE BELOW
rows, err := tx.Query(query)
if err != nil {
rollback_err(err)
}
if rows != nil {
for rows.Next() {
var sub_id int
err = rows.Scan(&sub_id)
if err != nil {
rollback_err(err)
}
subs_ids = append(subs_ids, sub_id)
}
if err := tx.Commit(); err != nil {
return rollback_err(err)
}
}
// Updating subs
for i, sub := range todo.Subs {
_, err = tx.ExecContext(ctx, fmt.Sprintf("UPDATE subs SET sub_name='%s' WHERE sub_id=%d", sub.Sub_name, subs_ids[i]))
if err != nil {
return rollback_err(err)
}
}
return nil
}

Post Request with PostgreSQL and json-api returns an empty body

After a POST request, i was expecting to have a last inserted record marshalled into json, but instead returns an empty body. What am i not doing well?
package models
import (
"encoding/json"
"errors"
"flag"
"fmt"
"log"
"net/http"
"strconv"
"github.com/go-chi/chi"
"github.com/google/jsonapi"
"github.com/thedevsaddam/renderer"
"github.com/xo/dburl"
)
var rnd = renderer.New()
var flagVerbose = flag.Bool("v", false, "verbose")
var FlagURL = flag.String("url", "postgres://postgres:#127.0.0.1/sweb", "url")
// Page represents a row from 'public.pages'.
type Page struct {
Tag string `jsonapi:"attr,tag"` // tag
Body string `jsonapi:"attr,body"` // body
Slug string `jsonapi:"attr,slug"` // slug
Title string `jsonapi:"attr,title"` // title
ID int `jsonapi:"primary,pages"` // id
Link string `jsonapi:"attr,link"` // link
// xo fields
_exists, _deleted bool
}
func (page Page) JSONAPILinks() *jsonapi.Links {
return &jsonapi.Links{
"self": fmt.Sprintf("https://%d", page.ID),
}
}
I believe this is the culprit. After inserting a record, it should return the last inserted record as specified.
func (p *Page) PInsert(db XODB) (*Page, error) {
var err error
// if already exist, bail
if p._exists {
return p, errors.New("insert failed: already exists")
}
// sql insert query, primary key provided by sequence
const sqlstr = `INSERT INTO public.pages (` +
`tag, body, slug, title` +
`) VALUES (` +
`$1, $2, $3, $4` +
`) RETURNING id, tag, body, title`
// run query
XOLog(sqlstr, p.Tag, p.Body, p.Slug, p.Title)
err = db.QueryRow(sqlstr, p.Tag, p.Body, p.Slug, p.Title).Scan(&p.ID, &p.Tag, &p.Body, &p.Title)
if err != nil {
return p, err
}
// set existence
p._exists = true
return p, nil
}
Update updates the Page in the database and return last inserted records.
The same should apply for the Update function
func (p *Page) Update(db XODB) (*Page, error) {
var err error
// if doesn't exist, bail
if !p._exists {
return p, errors.New("update failed: does not exist")
}
// if deleted, bail
if p._deleted {
return p, errors.New("update failed: marked for deletion")
}
// sql query
const sqlstr = `UPDATE public.pages SET (` +
`tag, body, slug, title` +
`) = ( ` +
`$1, $2, $3, $4` +
`) WHERE id = $5`
// run query
XOLog(sqlstr, p.Tag, p.Body, p.Slug, p.Title, p.ID)
_, err = db.Exec(sqlstr, p.Tag, p.Body, p.Slug, p.Title, p.ID)
return p, err
}
func (p *Page) PSave(db XODB) (*Page, error) {
if p.Exists() {
return p.Update(db)
}
return p.PInsert(db)
}
func NewPage(w http.ResponseWriter, r *http.Request) {
db, err := dburl.Open(*FlagURL)
defer db.Close()
if err != nil {
log.Fatal(err)
}
var page Page
//page := new(Page)
if err := jsonapi.UnmarshalPayload(r.Body, &page); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
p, err := page.PSave(db)
if err != nil {
fmt.Println(err)
if err := jsonapi.MarshalPayload(w, p); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
fmt.Println(err)
}
}
w.Header().Set("Content-Type", jsonapi.MediaType)
w.WriteHeader(http.StatusCreated)
}
This is the last function i believe the issue is happening from. the last inserted record supposed to be marshalled into json.
Your last section of code contains a number of mistakes. The relevant section (without the useless and obfuscating Printlns) is:
p, err := page.PSave(db)
if err != nil {
if err := jsonapi.MarshalPayload(w, p); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
}
}
w.Header().Set("Content-Type", jsonapi.MediaType)
w.WriteHeader(http.StatusCreated)
And the primary mistake is that json.MarshalPayload is only called when err != nil. In other words, you only serialize the page if you failed to save it.
The secondary mistake is that jsonapi.MarshalPayload will call Write on the http.ResponseWriter. This turns all subsequent calls to Header().Set and WriteHeader into no-ops.
More correct code would look like this.
// 1. Save the page in the database, bail on error
p, err := page.PSave(db)
if err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// 2. Marshal the page into an intermediate buffer, bail on error
var buf bytes.Buffer
if err := jsonapi.MarshalPayload(&buf, p); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
// 3. Write the entire response; failures to write the intermediate buffer
// cannot be communicated over HTTP
w.Header().Set("Content-Type", jsonapi.MediaType)
w.WriteHeader(http.StatusCreated)
if _, err := buf.WriteTo(w); err != nil {
log.Printf("failed to write response: %v", err)
return
}

How to stream binary data into a PostgreSQL BYTEA column using the Golang lib/pq API?

I'd like to insert some binary data into a BYTEA column,
How would I go about streaming the contents of somefile.tar.gz into a table with a BYTEA column?
Is it possible to stream to/from postgres from/to golang?
If one would be willing to switch to github.com/jackc/pgx, Large Objects (PostgreSQL docs) can be streamed. The pgx.LargeObject type implements:
io.Writer
io.Reader
io.Seeker
io.Closer
Large Objects are stored in a system table, there is no Large Object type which can be used in a column of a table. Large Objects are referenced by their object identifier. So a separate table needs to be maintained with file metadata and oid mapping.
Example program:
package main
import (
"context"
"io"
"log"
"os"
"time"
"github.com/jackc/pgx/v4"
)
const (
// files table maps Large Object oid to file names
createFileTable = `CREATE TABLE files (
id oid primary key,
name varchar,
unique(name)
);`
)
func main() {
ctx, cancel := context.WithTimeout(context.TODO(), time.Minute)
defer cancel()
conn, err := pgx.Connect(ctx, "user=postgres host=/run/postgresql dbname=postgres")
if err != nil {
panic(err)
}
defer conn.Close(ctx)
if _, err = conn.Exec(ctx, createFileTable); err != nil {
panic(err)
}
written, err := storeFile(ctx, conn, "somefile.bin")
log.Printf("storeFile written: %d", written)
if err != nil {
panic(err)
}
read, err := loadFile(ctx, conn, "somefile.bin")
log.Printf("loadFile read: %d", read)
if err != nil {
panic(err)
}
}
// storeFile as Large Object in the database.
// The resulting object identifier is stored along with the file name in the files table.
// The amount of written bytes and an erorr is returned, if one occured.
func storeFile(ctx context.Context, conn *pgx.Conn, name string) (written int64, err error) {
file, err := os.Open(name)
if err != nil {
return 0, err
}
defer file.Close()
// LargeObjects can only operate on an active TX
tx, err := conn.Begin(ctx)
if err != nil {
return 0, err
}
defer tx.Rollback(ctx)
lobs := tx.LargeObjects()
// Create a new Large Object.
// We pass 0, so the DB can pick an available oid for us.
oid, err := lobs.Create(ctx, 0)
if err != nil {
return 0, err
}
// record the oid and filename in the files table
_, err = tx.Exec(ctx, "INSERT INTO files (id, name) VALUES ($1, $2)", oid, name)
if err != nil {
return 0, err
}
// Open the new Object for writing.
obj, err := lobs.Open(ctx, oid, pgx.LargeObjectModeWrite)
if err != nil {
return 0, err
}
// Copy the file stream to the Large Object stream
written, err = io.Copy(obj, file)
if err != nil {
return written, err
}
err = tx.Commit(ctx)
return written, err
}
// loadFile loads the file identified by name as Large Object
// and writes the contents to a local file by the same name.
// The amount of bytes read or an error is returned.
func loadFile(ctx context.Context, conn *pgx.Conn, name string) (read int64, err error) {
tx, err := conn.Begin(ctx)
if err != nil {
return 0, err
}
defer tx.Rollback(ctx)
var oid uint32
err = conn.QueryRow(ctx, "SELECT id FROM files WHERE name = $1", name).Scan(&oid)
if err != nil {
return 0, err
}
file, err := os.Create(name)
if err != nil {
return 0, err
}
lobs := tx.LargeObjects()
obj, err := lobs.Open(ctx, oid, pgx.LargeObjectModeRead)
if err != nil {
return 0, err
}
return io.Copy(file, obj)
}

Converting Exec argument $2 type: invalid character ‘b’ looking for beginning of value

I have a error when I try to update filed in JSONB with any letter, but if I update same filed with numbers it's work perfect. sql: converting Exec argument $2 type: invalid character ‘b’ looking for beginning of value. Example of insert type: params["imei_json"] = types.JSONText(params["imei"].(string))
//Update User Information
func (metadata *accountPhoneNumberMetadata) UpdateUserInfo(params map[string]interface{}) error {
params["imei_json"] = types.JSONText(params["imei"].(string))
params["device_unique_id_json"] = types.JSONText(params["device_unique_id"].(string))
tx := metadata.client.MustBegin()
if _, err := tx.NamedExec(metadata.sqlMetadataUpdate(), params); err != nil {
tx.Rollback()
return err
}
if _, err := tx.NamedExec(metadata.sqlPhoneNumbers(), params); err != nil {
tx.Rollback()
return err
}
if _, err := tx.NamedExec(metadata.sqlUpdateRegistrationMetadata(), params); err != nil {
tx.Rollback()
return err
}
if err := tx.Commit(); err != nil {
tx.Rollback()
return err
}
return nil
}
func (metadata *accountPhoneNumberMetadata) sqlUpdateRegistrationMetadata() string {
return `UPDATE registrations.metadata
SET
metadata =
jsonb_set(jsonb_set(metadata, '{react_native_imei, device_imei}', :imei_json),
'{react_native_device_info, device_unique_id}', :device_unique_id_json )
WHERE
metadata_id = (SELECT metadata_id FROM registrations.metadata WHERE registration_id = :registration_id LIMIT 1)`
}
I have decided it like this, just add quotes.
JSON requires double quotes for all strings
params["imei_json"] = types.JSONText(`"` + params["imei"].(string) + `"`)
params["device_unique_id_json"] = types.JSONText(`"` + params["device_unique_id"].(string) + `"`)