Passing a dynamic struct into a function in Go - MongoDB

I have a couple of structs, Products and Categories. The two functions listed below have identical logic; only the structs being used and returned differ. Is there any way I can abstract out the struct data types and use the same logic in a single function called GetObjects?
func GetCategories(collection *mongo.Collection) []Category {
    ctx := context.Background()
    cats := []Category{}
    cur, err := collection.Find(ctx, bson.M{})
    if err != nil {
        log.Fatal("Error: ", err)
    }
    for cur.Next(context.TODO()) {
        var cat Category
        err = cur.Decode(&cat)
        if err != nil {
            log.Fatal(err)
        }
        cats = append(cats, cat)
    }
    return cats
}
func GetProducts(collection *mongo.Collection) []Product {
    ctx := context.Background()
    prods := []Product{}
    cur, err := collection.Find(ctx, bson.M{})
    if err != nil {
        log.Fatal("Error: ", err)
    }
    for cur.Next(context.TODO()) {
        var prod Product
        err = cur.Decode(&prod)
        if err != nil {
            log.Fatal(err)
        }
        prods = append(prods, prod)
    }
    return prods
}

You could create a generalized GetObjs() if you pass in the destination where you want the results to be loaded.
Something like:
func GetObjs(c *mongo.Collection, dst interface{})
Callers are then responsible for passing a pointer to a slice variable where the results will be stored.
Also note that a context.Context should be passed in rather than created arbitrarily inside the function:
func GetObjs(ctx context.Context, c *mongo.Collection, dst interface{})
Also, errors should be returned and not "swallowed", so if one occurs, it can be dealt with appropriately at the caller:
func GetObjs(ctx context.Context, c *mongo.Collection, dst interface{}) error
Also, if you need all results, you don't need to iterate over the cursor and decode the documents one by one: just use Cursor.All().
This is how the "improved" GetObjs() could look:
func GetObjs(ctx context.Context, c *mongo.Collection, dst interface{}) error {
    cur, err := c.Find(ctx, bson.M{})
    if err != nil {
        return err
    }
    return cur.All(ctx, dst)
}
(Although this became quite simple, not sure it warrants its own existence.)
And this is how you could use it:
ctx := ... // Obtain context, e.g. from the request: r.Context()
c := ... // Collection you want to query from
var cats []Category
if err := GetObjs(ctx, c, &cats); err != nil {
// Handle error
return
}
// Use cats
var prods []Product
if err := GetObjs(ctx, c, &prods); err != nil {
// Handle error
return
}
// Use prods
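If you are on Go 1.18 or newer, type parameters offer another way to abstract the element type while keeping a typed result at the call site. The following is only a sketch building on the same Cursor.All() idea; GetTyped is a made-up name and is not part of the original answer:

// GetTyped is a hypothetical generic variant of GetObjs.
func GetTyped[T any](ctx context.Context, c *mongo.Collection) ([]T, error) {
    cur, err := c.Find(ctx, bson.M{})
    if err != nil {
        return nil, err
    }
    var out []T
    if err := cur.All(ctx, &out); err != nil {
        return nil, err
    }
    return out, nil
}

// Usage:
//   cats, err := GetTyped[Category](ctx, categoriesCollection)
//   prods, err := GetTyped[Product](ctx, productsCollection)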

Related

Is there a way with gRPC to notify the stream of CRUD operations to give real-time updates to the client?

I am new to gRPC and I am trying to implement a basic CRUD + listing. I use unary RPCs for the CRUD operations and a server stream for the listing. What I would like to do, however, is update the client whenever someone changes a record in the database that the client is currently listing.
So, for example, user A is listing 10 companies and user B updates one of those companies. I want user A's client to be updated once the update RPC is called.
This is what I have for now:
func RegisterCompanyServer(l hclog.Logger, gs *grpc.Server) {
    r := postgres.NewPostgresCompanyRepository()
    cs := NewCompanyServer(l, r)
    pb.RegisterCompanyServiceServer(gs, cs)
}

type CompanyServer struct {
    logger hclog.Logger
    repo   repo.CompanyRepository
    pb.UnimplementedCompanyServiceServer
}

func NewCompanyServer(l hclog.Logger, r repo.CompanyRepository) *CompanyServer {
    return &CompanyServer{
        logger: l,
        repo:   r,
    }
}

func (c *CompanyServer) ListCompany(req *pb.CompanyListRequest, stream pb.CompanyService_ListCompanyServer) error {
    // Somehow listen to CreateCompany() and update the client
    companies, err := c.repo.List(req.Query)
    if err != nil {
        return err
    }
    for _, c := range companies {
        bytes, err := json.Marshal(c)
        if err != nil {
            return err
        }
        out := &pb.Company{}
        if err = jsonEnc.Unmarshal(bytes, out); err != nil {
            return err
        }
        res := &pb.CompanyListResponse{
            Company: out,
        }
        err = stream.Send(res)
        if err != nil {
            return err
        }
    }
    return nil
}
func (c *CompanyServer) CreateCompany(context context.Context, req *pb.CompanyCreateRequest) (*pb.CompanyCreateResponse, error) {
    input := req.GetCompany()
    if input == nil {
        return nil, errors.New("Parsing Error")
    }
    bytes, err := jsonEnc.Marshal(input)
    if err != nil {
        return nil, err
    }
    company := &myCompany.Company{}
    if err = json.Unmarshal(bytes, company); err != nil {
        return nil, err
    }
    result, err := c.repo.Create(company)
    if err != nil {
        return nil, err
    }
    res := &pb.CompanyCreateResponse{
        Id: result,
    }
    // Somehow notify the stream that a company was created
    return res, nil
}
Is this even feasible with gRPC? What techniques are out there to do this? I am currently working with a PostgreSQL database.
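(No answer is recorded for this question in the thread. Purely as an illustration of one common approach, here is a minimal, hypothetical sketch of an in-process broadcaster that CreateCompany could publish to and ListCompany could block on after sending its initial result set. Broadcaster, Subscribe, Unsubscribe, Publish, and the broadcaster field on CompanyServer are invented names; the sketch needs the standard sync package, and a production setup would more likely rely on PostgreSQL LISTEN/NOTIFY or a message broker so updates survive across server instances.)

// Broadcaster is a hypothetical in-process pub/sub used only for this sketch.
type Broadcaster struct {
    mu   sync.Mutex
    subs map[chan *pb.Company]struct{}
}

func NewBroadcaster() *Broadcaster {
    return &Broadcaster{subs: make(map[chan *pb.Company]struct{})}
}

func (b *Broadcaster) Subscribe() chan *pb.Company {
    b.mu.Lock()
    defer b.mu.Unlock()
    ch := make(chan *pb.Company, 16)
    b.subs[ch] = struct{}{}
    return ch
}

func (b *Broadcaster) Unsubscribe(ch chan *pb.Company) {
    b.mu.Lock()
    defer b.mu.Unlock()
    delete(b.subs, ch)
    close(ch)
}

func (b *Broadcaster) Publish(company *pb.Company) {
    b.mu.Lock()
    defer b.mu.Unlock()
    for ch := range b.subs {
        select {
        case ch <- company: // deliver if the subscriber is keeping up
        default: // otherwise drop rather than block the RPC
        }
    }
}

// ListCompany could then, after streaming the initial list, subscribe and keep
// sending until the client disconnects:
//
//   ch := c.broadcaster.Subscribe()
//   defer c.broadcaster.Unsubscribe(ch)
//   for {
//       select {
//       case company := <-ch:
//           if err := stream.Send(&pb.CompanyListResponse{Company: company}); err != nil {
//               return err
//           }
//       case <-stream.Context().Done():
//           return nil
//       }
//   }
//
// and CreateCompany would call c.broadcaster.Publish(req.GetCompany()) after the repository insert.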

How to have MongoDB decode into a struct passed into a function

The Decode step of the following code does not populate the original document object correctly. It overwrites it with a bson object.
func main() {
    c := Call{}
    dbGetObject("collection", &c)
}

func dbGetObject(collectionName string, document interface{}) (err error) {
    uri, creds, auth := dbGetAuth()
    clientOpts := options.Client().ApplyURI(uri).SetAuth(creds)
    client, err := mongo.Connect(context.TODO(), clientOpts)
    if err != nil {
        log.Fatal(err)
        return err
    }
    defer client.Disconnect(context.TODO())
    collection := client.Database(auth.Database).Collection(collectionName)
    err = collection.FindOne(context.TODO(), bson.M{"number": "12345"}).Decode(&document)
    if err != nil {
        log.Fatal(err)
        return err
    }
    return nil
}
Yet the following code does work properly:
func dbGetObject(collectionName string) (err error) {
    uri, creds, auth := dbGetAuth()
    clientOpts := options.Client().ApplyURI(uri).SetAuth(creds)
    client, err := mongo.Connect(context.TODO(), clientOpts)
    if err != nil {
        log.Fatal(err)
        return err
    }
    defer client.Disconnect(context.TODO())
    collection := client.Database(auth.Database).Collection(collectionName)
    c := Call{}
    err = collection.FindOne(context.TODO(), bson.M{"number": "12345"}).Decode(&c)
    if err != nil {
        log.Fatal(err)
        return err
    }
    return nil
}
The only difference is that the instance of the struct is passed into the function instead of being instantiated inside dbGetObject. What am I doing wrong?
In the first example, the type of document is interface{}.
If you fix the argument type as below, it will work correctly:
func dbGetObject(collectionName string, document *Call)
I actually realized what is going on. In the call to the function I am already passing a pointer (&c), and then I take the address again in the Decode call, so Decode effectively receives a pointer to a pointer. The fix is to change the Decode call from:
err = collection.FindOne(context.TODO(), bson.M{"number": "12345"}).Decode(&document)
to
err = collection.FindOne(context.TODO(), bson.M{"number": "12345"}).Decode(document)
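For completeness, a sketch of the corrected function, identical to the one in the question apart from the Decode argument (error handling trimmed for brevity):

func dbGetObject(collectionName string, document interface{}) (err error) {
    uri, creds, auth := dbGetAuth()
    clientOpts := options.Client().ApplyURI(uri).SetAuth(creds)
    client, err := mongo.Connect(context.TODO(), clientOpts)
    if err != nil {
        return err
    }
    defer client.Disconnect(context.TODO())
    collection := client.Database(auth.Database).Collection(collectionName)
    // document already holds a pointer (e.g. *Call), so pass it through unchanged.
    return collection.FindOne(context.TODO(), bson.M{"number": "12345"}).Decode(document)
}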

Custom BSON marshal and unmarshal using mongo-driver

I have a struct like the one below. I also store the raw protobuf of the same struct in the DB. Every time I save data to Mongo I have to update ReallyBigRaw from the proto, and every time I fetch I have to unmarshal ReallyBigRaw into ReallyBigObj to build responses. Is there a way I can implement some interface or provide some callback functions so that the mongo driver does this automatically when saving or fetching data from the DB?
Also, I am using the official Go mongo driver, not mgo; I have read some answers describing how this can be done with the mgo library.
import (
    "github.com/golang/protobuf/jsonpb"
    "go.mongodb.org/mongo-driver/bson"
    "go.mongodb.org/mongo-driver/bson/primitive"

    proto "github.com/dinesh/api/go"
)

type ReallyBig struct {
    ID      string `bson:"_id,omitempty"`
    DraftID string `bson:"draft_id,omitempty"`
    // Marshaled ReallyBigObj proto to map[string]interface{} stored in DB
    ReallyBigRaw map[string]interface{} `bson:"raw,omitempty"`
    ReallyBigObj *proto.ReallyBig       `bson:"-"`
    CreatedAt    primitive.DateTime     `bson:"created_at,omitempty"`
    UpdatedAt    primitive.DateTime     `bson:"updated_at,omitempty"`
}
func (r *ReallyBig) GetProto() (*proto.ReallyBig, error) {
    if r.ReallyBigObj != nil {
        return r.ReallyBigObj, nil
    }
    Obj, err := getProto(r.ReallyBigRaw)
    if err != nil {
        return nil, err
    }
    r.ReallyBigObj = Obj
    return r.ReallyBigObj, nil
}

func getRaw(r *proto.ReallyBig) (map[string]interface{}, error) {
    m := jsonpb.Marshaler{}
    b := bytes.NewBuffer([]byte{})
    // marshals proto to json format
    err := m.Marshal(b, r)
    if err != nil {
        return nil, err
    }
    var raw map[string]interface{}
    // unmarshal the raw data to a map
    err = json.Unmarshal(b.Bytes(), &raw)
    if err != nil {
        return nil, err
    }
    return raw, nil
}
func getProto(raw map[string]interface{}) (*proto.ReallyBig, error) {
    b, err := json.Marshal(raw)
    if err != nil {
        return nil, err
    }
    u := jsonpb.Unmarshaler{}
    var reallyBigProto proto.ReallyBig
    err = u.Unmarshal(bytes.NewReader(b), &reallyBigProto)
    if err != nil {
        return nil, err
    }
    return &reallyBigProto, nil
}
I implemented the bson Marshaler and Unmarshaler interfaces. Since the mongo driver calls MarshalBSON and UnmarshalBSON whenever a type implements Marshaler and Unmarshaler, calling bson.Marshal or bson.Unmarshal from inside those methods on the same type would recurse and end up in an infinite loop. To avoid that, we define a new type, ReallyBigAlias, from the original. A defined type in Go keeps only the fields, not the methods, so converting to it lets us call the normal bson.Marshal and bson.Unmarshal.
func (r *ReallyBig) MarshalBSON() ([]byte, error) {
    type ReallyBigAlias ReallyBig
    reallyBigRaw, err := getRaw(r.ReallyBigObj)
    if err != nil {
        return nil, err
    }
    r.ReallyBigRaw = reallyBigRaw
    return bson.Marshal((*ReallyBigAlias)(r))
}

func (r *ReallyBig) UnmarshalBSON(data []byte) error {
    type ReallyBigAlias ReallyBig
    err := bson.Unmarshal(data, (*ReallyBigAlias)(r))
    if err != nil {
        return err
    }
    reallyBigProto, err := getProto(r.ReallyBigRaw)
    if err != nil {
        return err
    }
    r.ReallyBigObj = reallyBigProto
    return nil
}
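With those two methods in place, ordinary driver calls pick up the conversion automatically. A brief usage sketch, assuming an existing collection, ctx, and a someProto value (all placeholder names):

// Saving: MarshalBSON fills ReallyBigRaw from ReallyBigObj before the write.
r := &ReallyBig{ID: "abc", ReallyBigObj: someProto}
if _, err := collection.InsertOne(ctx, r); err != nil {
    // handle error
}

// Loading: UnmarshalBSON rebuilds ReallyBigObj from the stored raw map.
var loaded ReallyBig
if err := collection.FindOne(ctx, bson.M{"_id": "abc"}).Decode(&loaded); err != nil {
    // handle error
}
// loaded.ReallyBigObj is populated again.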

How to create a MongoDB package

I'd like to build an infrastructure package for the project, so that other developers can import it to perform CRUD operations on the DB.
But I've got an error during testing:
type Students struct {
    Name string
    Age  int
}

type InsertOneResult struct {
    InsertedID interface{}
}

func dbGetOne(coll, document interface{}) (*InsertOneResult, error) {
    ...
}

func dbUpdateOne(coll, document interface{}) (*InsertOneResult, error) {
    ...
}

func dbDeleteOne(coll, document interface{}) (*InsertOneResult, error) {
    ...
}

func dbInsertOne(coll, document interface{}) (*InsertOneResult, error) {
    res, err := coll.InsertOne(context.TODO(), document)
    if err != nil {
        log.Fatal(err)
    }
    return &InsertOneResult{InsertedID: res[0]}, err
}

func main() {
    client, err := mongo.NewClient(options.Client().ApplyURI("mongodb://<user>:<password>#<host>:<port>/<dbname>"))
    if err != nil {
        log.Fatal(err)
    }
    ctx, _ := context.WithTimeout(context.Background(), 30*time.Second)
    err = client.Connect(ctx)
    if err != nil {
        log.Fatal(err)
    }
    coll := client.Database("db").Collection("students")
    data := Students{"Amy", 10}
    res, err := dbInsertOne(coll, data)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Printf("inserted document with ID %v\n", res.InsertedID)
}
Here's the error:
./main.go:24:18: coll.InsertOne undefined (type interface {} is interface with no methods)
Is there any way to solve this? Thanks in advance.
It looks like the error comes from the type of coll: it is declared as interface{}, which has no methods, so coll.InsertOne is undefined. The solution is to declare coll as *mongo.Collection in the dbInsertOne() function, so the compiler knows the concrete type at compile time instead of relying on an empty interface.
func dbInsertOne(coll *mongo.Collection, document interface{}) (*InsertOneResult, error) {
    res, err := coll.InsertOne(context.TODO(), document)
    if err != nil {
        log.Fatal(err)
    }
    return &InsertOneResult{InsertedID: res.InsertedID}, err
}
I would further suggest that the second argument, document, should also be given a known concrete type if possible, e.g.:
func dbInsertOne(coll *mongo.Collection, document Students)
Static typing will help quite a bit and clear up any confusion.
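A sketch of that suggestion, combining both typed arguments; returning the error to the caller instead of calling log.Fatal inside the package is an extra assumption, not something the answer spells out:

func dbInsertOne(coll *mongo.Collection, document Students) (*InsertOneResult, error) {
    res, err := coll.InsertOne(context.TODO(), document)
    if err != nil {
        // Let the caller decide how to handle the failure.
        return nil, err
    }
    return &InsertOneResult{InsertedID: res.InsertedID}, nil
}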

Deserialize cursor into array with mongo-go-driver and interface

I am creating an API using Go, and I would like to write some functional tests. For that I created an interface to abstract my database, but I need to be able to convert the cursor to an array without knowing the type.
func (self *KeyController) GetKey(c echo.Context) (err error) {
    var res []dto.Key
    err = db.Keys.Find(bson.M{}, 10, 0, &res)
    if err != nil {
        fmt.Println(err)
        return c.String(http.StatusInternalServerError, "internal error")
    }
    c.JSON(http.StatusOK, res)
    return
}

// THE FIND FUNCTION IN THE DB PACKAGE
func (s MongoCollection) Find(filter bson.M, limit int, offset int, res interface{}) (err error) {
    ctx := context.Background()
    var cursor *mongo.Cursor
    l := int64(limit)
    o := int64(offset)
    objectType := reflect.TypeOf(res).Elem()
    cursor, err = s.c.Find(ctx, filter, &options.FindOptions{
        Limit: &l,
        Skip:  &o,
    })
    if err != nil {
        return
    }
    defer cursor.Close(ctx)
    for cursor.Next(ctx) {
        result := reflect.New(objectType).Interface()
        err := cursor.Decode(&result)
        if err != nil {
            panic(err)
        }
        res = append(res.([]interface{}), result)
    }
    return
}
Does someone have an idea?
You can call the All() method directly:
ctx := context.Background()
err = cursor.All(ctx, res)
if err != nil {
    fmt.Println(err.Error())
}
For reference:
https://godoc.org/go.mongodb.org/mongo-driver/mongo#Cursor.All
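Applied to the Find method from the question, the whole decode loop collapses. This is only a sketch and assumes res is a pointer to a slice, as in the GetKey caller above:

func (s MongoCollection) Find(filter bson.M, limit int, offset int, res interface{}) error {
    ctx := context.Background()
    l := int64(limit)
    o := int64(offset)
    cursor, err := s.c.Find(ctx, filter, &options.FindOptions{
        Limit: &l,
        Skip:  &o,
    })
    if err != nil {
        return err
    }
    // All decodes every remaining document into the slice that res points to
    // and closes the cursor.
    return cursor.All(ctx, res)
}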
I think you want to encapsulate the Find method for Mongo queries.
Using the reflect package, I have improved your code by adding an additional parameter that serves as a template for instantiating new slice items.
func (m *MongoDbModel) FindAll(database string, colname string, obj interface{}, parameter map[string]interface{}) ([]interface{}, error) {
    var list = make([]interface{}, 0)
    collection, err := m.Client.Database(database).Collection(colname).Clone()
    objectType := reflect.TypeOf(obj).Elem()
    fmt.Println("objectType", objectType)
    if err != nil {
        log.Println(err)
        return nil, err
    }
    filter := bson.M{}
    filter["$and"] = []bson.M{}
    for key, value := range parameter {
        filter["$and"] = append(filter["$and"].([]bson.M), bson.M{key: value})
    }
    cur, err := collection.Find(context.Background(), filter)
    if err != nil {
        log.Fatal(err)
    }
    defer cur.Close(context.Background())
    for cur.Next(context.Background()) {
        result := reflect.New(objectType).Interface()
        err := cur.Decode(result)
        if err != nil {
            log.Println(err)
            return nil, err
        }
        list = append(list, result)
    }
    if err := cur.Err(); err != nil {
        return nil, err
    }
    return list, nil
}
The difference is that the FindAll method returns []interface{}, and err := cur.Decode(result) directly consumes a pointer such as the result variable.
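A hedged usage sketch of that FindAll: each returned element was created with reflect.New, so it is a pointer to the template's type and the caller type-asserts it back. The database name, collection name, and the "enabled" field are placeholders:

// Assumes a dto.Key struct and an initialized MongoDbModel named m.
results, err := m.FindAll("mydb", "keys", &dto.Key{}, map[string]interface{}{"enabled": true})
if err != nil {
    log.Fatal(err)
}
keys := make([]dto.Key, 0, len(results))
for _, r := range results {
    keys = append(keys, *(r.(*dto.Key))) // each element is a *dto.Key
}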