From d340db6dfde1f70d2458c1d428a15e51e4f157eb Mon Sep 17 00:00:00 2001 From: Ankit Patial Date: Sat, 26 Jul 2025 18:34:56 +0530 Subject: [PATCH] first commit --- .gitignore | 28 +++ LICENSE | 21 ++ Makefile | 4 + README.md | 5 + cmd/generate.go | 214 +++++++++++++++++ cmd/main.go | 37 +++ cmd/parse.go | 139 +++++++++++ example/db/branchuser/branch_users.go | 18 ++ example/db/comment/comments.go | 18 ++ example/db/employee/employees.go | 18 ++ example/db/post/posts.go | 18 ++ example/db/schema.go | 15 ++ example/db/user/users.go | 30 +++ example/db/usersession/user_sessions.go | 24 ++ example/generate.go | 3 + example/qry_delete_test.go | 18 ++ example/qry_insert_test.go | 84 +++++++ example/qry_select_test.go | 90 +++++++ example/qry_update_test.go | 66 +++++ example/schema.sql | 306 ++++++++++++++++++++++++ go.mod | 21 ++ go.sum | 25 ++ pgm.go | 134 +++++++++++ pool.go | 98 ++++++++ qry.go | 240 +++++++++++++++++++ qry_delete.go | 74 ++++++ qry_insert.go | 150 ++++++++++++ qry_select.go | 286 ++++++++++++++++++++++ qry_update.go | 100 ++++++++ 29 files changed, 2284 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 Makefile create mode 100644 README.md create mode 100644 cmd/generate.go create mode 100644 cmd/main.go create mode 100644 cmd/parse.go create mode 100644 example/db/branchuser/branch_users.go create mode 100644 example/db/comment/comments.go create mode 100644 example/db/employee/employees.go create mode 100644 example/db/post/posts.go create mode 100644 example/db/schema.go create mode 100644 example/db/user/users.go create mode 100644 example/db/usersession/user_sessions.go create mode 100644 example/generate.go create mode 100644 example/qry_delete_test.go create mode 100644 example/qry_insert_test.go create mode 100644 example/qry_select_test.go create mode 100644 example/qry_update_test.go create mode 100644 example/schema.sql create mode 100644 go.mod create mode 100644 go.sum create mode 100644 pgm.go create mode 100644 pool.go create mode 100644 qry.go create mode 100644 qry_delete.go create mode 100644 qry_insert.go create mode 100644 qry_select.go create mode 100644 qry_update.go diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..e06c06d --- /dev/null +++ b/.gitignore @@ -0,0 +1,28 @@ +# ---> Go +# If you prefer the allow list template instead of the deny list, see community template: +# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore +# +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll +*.so +*.dylib + +# Test binary, built with `go test -c` +*.test + +# Output of the go coverage tool, specifically when used with LiteIDE +*.out + +# Dependency directories (remove the comment below to include it) +# vendor/ + +# Go workspace file +go.work +go.work.sum + +# env file +.env + +example/local_* diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..5501b51 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2025 Patial Tech + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included 
in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..081d88e --- /dev/null +++ b/Makefile @@ -0,0 +1,4 @@ +run: + go run ./cmd -o ./example/db ./example/schema.sql +bench-select: + go test ./example -bench BenchmarkSelect -memprofile memprofile.out -cpuprofile profile.out diff --git a/README.md b/README.md new file mode 100644 index 0000000..d696b93 --- /dev/null +++ b/README.md @@ -0,0 +1,5 @@ +# pgm (Postgres Mapper) + +Simple query builder to work with Go:PG apps. + +Will work along side with [dbmate](https://github.com/amacneil/dbmate), will consume schema.sql file created by dbmate diff --git a/cmd/generate.go b/cmd/generate.go new file mode 100644 index 0000000..3a03692 --- /dev/null +++ b/cmd/generate.go @@ -0,0 +1,214 @@ +// Patial Tech. +// Author, Ankit Patial + +package main + +import ( + "fmt" + "go/format" + "os" + "path/filepath" + "strings" + + "golang.org/x/text/cases" + "golang.org/x/text/language" +) + +func generate(scheamPath, outDir string) error { + // read schame.sql + f, err := os.ReadFile(scheamPath) + if err != nil { + return err + } + + // Parse scheam.sql + tbls, err := parse(f) + if err != nil { + return err + } + + // Output dir, create if not exists. + if _, err := os.Stat(outDir); os.IsNotExist(err) { + if err := os.MkdirAll(outDir, 0740); err != nil { + return err + } + } + + // schema.go will hold all tables info + var sb strings.Builder + sb.WriteString("// Code generated by code.patial.tech/go/pgm/cmd\n// DO NOT EDIT.\n\n") + sb.WriteString(fmt.Sprintf("package %s \n", filepath.Base(outDir))) + sb.WriteString(` + import "code.patial.tech/go/pgm" + + var ( +`) + caser := cases.Title(language.English) + var ( + modalDir string + fieldCount int + ) + for _, t := range tbls { + // skip schema_migrations + if t.Table == "schema_migrations" { + continue + } + + name := pluralToSingular(t.Table) + + name = strings.ReplaceAll(name, "_", " ") + name = caser.String(name) + name = strings.ReplaceAll(name, " ", "") + fieldCount = len(t.Columns) + sb.WriteString(fmt.Sprintf( + " %s = pgm.Table{Name: %q, FieldCount: %d", name, t.Table, fieldCount, + )) + if len(t.PrimaryKey) > 0 { + sb.WriteString(", PK: []string{}") + sb.WriteString("}\n") + } else { + sb.WriteString("}\n") + } + modalDir = strings.ToLower(name) + os.Mkdir(filepath.Join(outDir, modalDir), 0740) + + if err = writeColFile(t.Table, t.Columns, filepath.Join(outDir, modalDir), caser); err != nil { + return err + } + } + sb.WriteString(")") + + // Format code before saving + code, err := formatGoCode(sb.String()) + if err != nil { + return err + } + + // Save file to disk + os.WriteFile(filepath.Join(outDir, "schema.go"), code, 0640) + return nil +} + +func writeColFile(tblName string, cols []*Column, outDir string, caser cases.Caser) error { + var sb strings.Builder + sb.WriteString("// Code generated by db-gen. 
DO NOT EDIT.\n\n") + sb.WriteString(fmt.Sprintf("package %s\n\n", filepath.Base(outDir))) + sb.WriteString(fmt.Sprintf("import %q\n\n", "code.patial.tech/go/pgm")) + sb.WriteString("const (") + var name string + for _, c := range cols { + name = strings.ReplaceAll(c.Name, "_", " ") + name = caser.String(name) + name = strings.ReplaceAll(name, " ", "") + + if strings.HasSuffix(name, "Id") { + name = name[0:len(name)-2] + "ID" + } + + sb.WriteString(fmt.Sprintf("\n // %s field has db type %q", name, c.Type)) + sb.WriteString(fmt.Sprintf("\n %s pgm.Field = %q", name, tblName+"."+c.Name)) + } + + sb.WriteString("\n)") + + // Format code before saving + code, err := formatGoCode(sb.String()) + if err != nil { + return err + } + // Save file to disk. + return os.WriteFile(filepath.Join(outDir, tblName+".go"), code, 0640) +} + +// pluralToSingular converts plural table names to singular forms +func pluralToSingular(plural string) string { + // Handle special irregular plurals + irregulars := map[string]string{ + "people": "person", + "children": "child", + "men": "man", + "women": "woman", + "feet": "foot", + "teeth": "tooth", + "mice": "mouse", + "geese": "goose", + "oxen": "ox", + "data": "datum", + "criteria": "criterion", + "phenomena": "phenomenon", + } + + lower := strings.ToLower(plural) + if singular, exists := irregulars[lower]; exists { + return singular + } + + // Handle words ending in -ies (cities -> city, companies -> company) + if strings.HasSuffix(lower, "ies") && len(plural) > 3 { + return plural[:len(plural)-3] + "y" + } + + // Handle words ending in -ves (lives -> life, wives -> wife) + if strings.HasSuffix(lower, "ves") && len(plural) > 3 { + return plural[:len(plural)-3] + "fe" + } + + // Handle words ending in -ses (classes -> class, addresses -> address) + if strings.HasSuffix(lower, "ses") && len(plural) > 3 { + return plural[:len(plural)-2] + } + + // Handle words ending in -xes (boxes -> box, taxes -> tax) + if strings.HasSuffix(lower, "xes") && len(plural) > 3 { + return plural[:len(plural)-2] + } + + // Handle words ending in -ches (watches -> watch, benches -> bench) + if strings.HasSuffix(lower, "ches") && len(plural) > 4 { + return plural[:len(plural)-2] + } + + // Handle words ending in -shes (dishes -> dish, bushes -> bush) + if strings.HasSuffix(lower, "shes") && len(plural) > 4 { + return plural[:len(plural)-2] + } + + // Handle words ending in -oes (heroes -> hero, potatoes -> potato) + if strings.HasSuffix(lower, "oes") && len(plural) > 3 { + return plural[:len(plural)-2] + } + + // Handle general -es endings + if strings.HasSuffix(lower, "es") && len(plural) > 2 { + withoutEs := plural[:len(plural)-2] + if len(withoutEs) > 0 { + lastChar := strings.ToLower(string(withoutEs[len(withoutEs)-1])) + // Only remove -es (not just -s) if the word ends in s, x, ch, sh, o (those need the -es) + if lastChar == "s" || lastChar == "x" || lastChar == "o" || + strings.HasSuffix(strings.ToLower(withoutEs), "ch") || + strings.HasSuffix(strings.ToLower(withoutEs), "sh") { + return withoutEs + } + } + // For words like "references", "preferences" - just remove the -s + return plural[:len(plural)-1] + } + + // Handle simple -s endings (users -> user, books -> book) + if strings.HasSuffix(lower, "s") && len(plural) > 1 { + return plural[:len(plural)-1] + } + + // If no rules match, return as-is (might already be singular) + return plural +} + +// formatGoCode formats the input Go code using go/format. 
+func formatGoCode(str string) ([]byte, error) { + formatted, err := format.Source([]byte(str)) + if err != nil { + return nil, fmt.Errorf("formatting code: %w", err) + } + + return formatted, nil +} diff --git a/cmd/main.go b/cmd/main.go new file mode 100644 index 0000000..c391d8c --- /dev/null +++ b/cmd/main.go @@ -0,0 +1,37 @@ +// Patial Tech. +// Author, Ankit Patial + +package main + +import ( + "flag" + "fmt" + "os" +) + +const usageTxt = `Please provide output director and input schema. +Example: + pgm/cmd -o ./db ./db/schema.sql + +` + +func main() { + var outDir string + flag.StringVar(&outDir, "o", "", "-o as output directory path") + flag.Parse() + if len(os.Args) < 4 { + fmt.Print(usageTxt) + return + } + + if outDir == "" { + println("missing, -o output directory path") + os.Exit(1) + return + } + + if err := generate(os.Args[3], outDir); err != nil { + println(err.Error()) + os.Exit(1) + } +} diff --git a/cmd/parse.go b/cmd/parse.go new file mode 100644 index 0000000..9c698fc --- /dev/null +++ b/cmd/parse.go @@ -0,0 +1,139 @@ +// Patial Tech. +// Author, Ankit Patial + +package main + +import ( + "bytes" + "fmt" + "regexp" + "strings" +) + +type ( + TableInfo struct { + Schema string + Table string + Columns []*Column + PrimaryKey []string + } + + Column struct { + // Name of column + Name string + // Type of column + Type string + } +) + +func parse(scheam []byte) ([]*TableInfo, error) { + var ( + t = false + n = bytes.Count(scheam, []byte("CREATE TABLE")) + tables = make([]*TableInfo, n) + i = 0 + sb strings.Builder + ) + + for l := range bytes.SplitSeq(scheam, []byte("\n")) { + if strings.HasPrefix(string(l), "CREATE TABLE") { + t = true + sb.Write(l) + } else if t { + sb.Write(l) + if strings.Contains(string(l), ";") { + t = false + t, err := parseTableStmt(sb.String()) + if err != nil { + return nil, err + } + tables[i] = t + i++ + sb.Reset() + } + + } + } + + return tables, nil +} + +func parseTableStmt(sqlStatement string) (*TableInfo, error) { + // Regex to match CREATE TABLE statement and extract table name and column definitions + tableRegex := regexp.MustCompile(`(?i)CREATE\s+TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:` + "`" + `?(\w+)` + "`" + `?\.)?` + "`" + `?(\w+)` + "`" + `?\s*\(([\s\S]*?)\)(?:\s*ENGINE.*?)?;`) + matches := tableRegex.FindStringSubmatch(sqlStatement) + if matches == nil { + return nil, fmt.Errorf("no CREATE TABLE statement found") + } + + schema := matches[1] + tbl := matches[2] + cols := matches[3] + + // Parse column definitions by splitting first on commas + // This is a simplistic approach - a real SQL parser would be more robust + columns := parseColumns(cols) + + return &TableInfo{ + Schema: schema, + Table: tbl, + Columns: columns, + }, nil +} + +func parseColumns(colsStr string) []*Column { + var columns []*Column + var currentDef strings.Builder + parenthesesCount := 0 + inQuote := false + + // First, intelligently split the column definitions + var columnDefs []string + for _, char := range colsStr { + if char == '(' { + parenthesesCount++ + currentDef.WriteRune(char) + } else if char == ')' { + parenthesesCount-- + currentDef.WriteRune(char) + } else if char == '\'' || char == '"' { + inQuote = !inQuote + currentDef.WriteRune(char) + } else if char == ',' && parenthesesCount == 0 && !inQuote { + // End of a column definition + columnDefs = append(columnDefs, strings.TrimSpace(currentDef.String())) + currentDef.Reset() + } else { + currentDef.WriteRune(char) + } + } + + // Add the last column definition if there's anything left + if 
currentDef.Len() > 0 { + columnDefs = append(columnDefs, strings.TrimSpace(currentDef.String())) + } + + // Now parse each column definition + for _, colDef := range columnDefs { + // Skip constraints and keys that don't define columns + if strings.HasPrefix(strings.ToUpper(colDef), "PRIMARY KEY") || + strings.HasPrefix(strings.ToUpper(colDef), "UNIQUE KEY") || + strings.HasPrefix(strings.ToUpper(colDef), "FOREIGN KEY") || + strings.HasPrefix(strings.ToUpper(colDef), "KEY") || + strings.HasPrefix(strings.ToUpper(colDef), "INDEX") || + strings.HasPrefix(strings.ToUpper(colDef), "CONSTRAINT") { + continue + } + + colNameRegex := regexp.MustCompile(`^` + "`" + `?(\w+)` + "`" + `?\s+(.+)$`) + matches := colNameRegex.FindStringSubmatch(colDef) + if matches != nil { + columns = append(columns, &Column{ + Name: matches[1], + Type: strings.TrimSpace(matches[2]), + }) + } + } + + return columns +} diff --git a/example/db/branchuser/branch_users.go b/example/db/branchuser/branch_users.go new file mode 100644 index 0000000..bc6335e --- /dev/null +++ b/example/db/branchuser/branch_users.go @@ -0,0 +1,18 @@ +// Code generated by db-gen. DO NOT EDIT. + +package branchuser + +import "code.patial.tech/go/pgm" + +const ( + // BranchID field has db type "bigint NOT NULL" + BranchID pgm.Field = "branch_users.branch_id" + // UserID field has db type "bigint NOT NULL" + UserID pgm.Field = "branch_users.user_id" + // RoleID field has db type "smallint NOT NULL" + RoleID pgm.Field = "branch_users.role_id" + // CreatedAt field has db type "timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL" + CreatedAt pgm.Field = "branch_users.created_at" + // UpdatedAt field has db type "timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL" + UpdatedAt pgm.Field = "branch_users.updated_at" +) diff --git a/example/db/comment/comments.go b/example/db/comment/comments.go new file mode 100644 index 0000000..e14c83b --- /dev/null +++ b/example/db/comment/comments.go @@ -0,0 +1,18 @@ +// Code generated by db-gen. DO NOT EDIT. + +package comment + +import "code.patial.tech/go/pgm" + +const ( + // ID field has db type "integer NOT NULL" + ID pgm.Field = "comments.id" + // PostID field has db type "integer NOT NULL" + PostID pgm.Field = "comments.post_id" + // UserID field has db type "integer NOT NULL" + UserID pgm.Field = "comments.user_id" + // Content field has db type "text NOT NULL" + Content pgm.Field = "comments.content" + // CreatedAt field has db type "timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP" + CreatedAt pgm.Field = "comments.created_at" +) diff --git a/example/db/employee/employees.go b/example/db/employee/employees.go new file mode 100644 index 0000000..66ccbe3 --- /dev/null +++ b/example/db/employee/employees.go @@ -0,0 +1,18 @@ +// Code generated by db-gen. DO NOT EDIT. 
+ +package employee + +import "code.patial.tech/go/pgm" + +const ( + // ID field has db type "integer NOT NULL" + ID pgm.Field = "employees.id" + // Name field has db type "var NOT NULL" + Name pgm.Field = "employees.name" + // Department field has db type "var NOT NULL" + Department pgm.Field = "employees.department" + // Salary field has db type "decimal(10,2)" + Salary pgm.Field = "employees.salary" + // CreatedAt field has db type "timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP updated_at timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP" + CreatedAt pgm.Field = "employees.created_at" +) diff --git a/example/db/post/posts.go b/example/db/post/posts.go new file mode 100644 index 0000000..c0920a2 --- /dev/null +++ b/example/db/post/posts.go @@ -0,0 +1,18 @@ +// Code generated by db-gen. DO NOT EDIT. + +package post + +import "code.patial.tech/go/pgm" + +const ( + // ID field has db type "integer NOT NULL" + ID pgm.Field = "posts.id" + // UserID field has db type "integer NOT NULL" + UserID pgm.Field = "posts.user_id" + // Title field has db type "character varying(255) NOT NULL" + Title pgm.Field = "posts.title" + // Content field has db type "text" + Content pgm.Field = "posts.content" + // CreatedAt field has db type "timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP" + CreatedAt pgm.Field = "posts.created_at" +) diff --git a/example/db/schema.go b/example/db/schema.go new file mode 100644 index 0000000..03fa8c8 --- /dev/null +++ b/example/db/schema.go @@ -0,0 +1,15 @@ +// Code generated by code.patial.tech/go/pgm/cmd +// DO NOT EDIT. + +package db + +import "code.patial.tech/go/pgm" + +var ( + User = pgm.Table{Name: "users", FieldCount: 11} + UserSession = pgm.Table{Name: "user_sessions", FieldCount: 8} + BranchUser = pgm.Table{Name: "branch_users", FieldCount: 5} + Post = pgm.Table{Name: "posts", FieldCount: 5} + Comment = pgm.Table{Name: "comments", FieldCount: 5} + Employee = pgm.Table{Name: "employees", FieldCount: 5} +) diff --git a/example/db/user/users.go b/example/db/user/users.go new file mode 100644 index 0000000..7f751b9 --- /dev/null +++ b/example/db/user/users.go @@ -0,0 +1,30 @@ +// Code generated by db-gen. DO NOT EDIT. 
+ +package user + +import "code.patial.tech/go/pgm" + +const ( + // ID field has db type "integer NOT NULL" + ID pgm.Field = "users.id" + // Name field has db type "character varying(255) NOT NULL" + Name pgm.Field = "users.name" + // Email field has db type "character varying(255) NOT NULL" + Email pgm.Field = "users.email" + // Phone field has db type "character varying(20)" + Phone pgm.Field = "users.phone" + // FirstName field has db type "character varying(50) NOT NULL" + FirstName pgm.Field = "users.first_name" + // MiddleName field has db type "character varying(50)" + MiddleName pgm.Field = "users.middle_name" + // LastName field has db type "character varying(50) NOT NULL" + LastName pgm.Field = "users.last_name" + // StatusID field has db type "smallint" + StatusID pgm.Field = "users.status_id" + // MfaKind field has db type "character varying(50) DEFAULT 'None'::character varying" + MfaKind pgm.Field = "users.mfa_kind" + // CreatedAt field has db type "timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP" + CreatedAt pgm.Field = "users.created_at" + // UpdatedAt field has db type "timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP" + UpdatedAt pgm.Field = "users.updated_at" +) diff --git a/example/db/usersession/user_sessions.go b/example/db/usersession/user_sessions.go new file mode 100644 index 0000000..266373c --- /dev/null +++ b/example/db/usersession/user_sessions.go @@ -0,0 +1,24 @@ +// Code generated by db-gen. DO NOT EDIT. + +package usersession + +import "code.patial.tech/go/pgm" + +const ( + // ID field has db type "character varying NOT NULL" + ID pgm.Field = "user_sessions.id" + // CreatedAt field has db type "timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL" + CreatedAt pgm.Field = "user_sessions.created_at" + // UpdatedAt field has db type "timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL" + UpdatedAt pgm.Field = "user_sessions.updated_at" + // ExpiresAt field has db type "timestamp with time zone NOT NULL" + ExpiresAt pgm.Field = "user_sessions.expires_at" + // BranchID field has db type "bigint" + BranchID pgm.Field = "user_sessions.branch_id" + // UserID field has db type "bigint NOT NULL" + UserID pgm.Field = "user_sessions.user_id" + // RoleID field has db type "smallint" + RoleID pgm.Field = "user_sessions.role_id" + // LoginIp field has db type "character varying NOT NULL" + LoginIp pgm.Field = "user_sessions.login_ip" +) diff --git a/example/generate.go b/example/generate.go new file mode 100644 index 0000000..67c8a4b --- /dev/null +++ b/example/generate.go @@ -0,0 +1,3 @@ +//go:generate go run code.patial.tech/go/pgm/cmd -o ./db ./schema.sql + +package example diff --git a/example/qry_delete_test.go b/example/qry_delete_test.go new file mode 100644 index 0000000..2a6315d --- /dev/null +++ b/example/qry_delete_test.go @@ -0,0 +1,18 @@ +package example + +import ( + "testing" + + "code.patial.tech/go/pgm/example/db" + "code.patial.tech/go/pgm/example/db/user" +) + +func TestDelete(t *testing.T) { + expected := "DELETE FROM users WHERE users.id = $1 AND users.status_id NOT IN($2)" + got := db.User.Delete(). + Where(user.ID.Eq(1), user.StatusID.NotIn(1, 2, 3)). 
+ String() + if got != expected { + t.Errorf("got %q, want %q", got, expected) + } +} diff --git a/example/qry_insert_test.go b/example/qry_insert_test.go new file mode 100644 index 0000000..9b3a1db --- /dev/null +++ b/example/qry_insert_test.go @@ -0,0 +1,84 @@ +package example + +import ( + "testing" + + "code.patial.tech/go/pgm" + "code.patial.tech/go/pgm/example/db" + "code.patial.tech/go/pgm/example/db/user" +) + +func TestInsertQuery(t *testing.T) { + got := db.User.Insert(). + Set(user.Email, "aa@aa.com"). + Set(user.Phone, 8889991234). + Set(user.FirstName, "fname"). + Set(user.LastName, "lname"). + Returning(user.ID). + String() + + expected := "INSERT INTO users(email, phone, first_name, last_name) VALUES($1, $2, $3, $4) RETURNING id" + if got != expected { + t.Errorf("\nexpected: %q\ngot: %q", expected, got) + } +} + +func TestInsertSetMap(t *testing.T) { + got := db.User.Insert(). + SetMap(map[pgm.Field]any{ + user.Email: "aa@aa.com", + user.Phone: 8889991234, + user.FirstName: "fname", + user.LastName: "lname", + }). + Returning(user.ID). + String() + + expected := "INSERT INTO users(email, phone, first_name, last_name) VALUES($1, $2, $3, $4) RETURNING id" + if got != expected { + t.Errorf("\nexpected: %q\ngot: %q", expected, got) + } +} + +func TestInsertQuery2(t *testing.T) { + got := db.User.Insert(). + Set(user.Email, "aa@aa.com"). + Set(user.Phone, 8889991234). + OnConflict(user.ID). + DoNothing(). + String() + expected := "INSERT INTO users(email, phone) VALUES($1, $2) ON CONFLICT (id) DO NOTHING" + if got != expected { + t.Errorf("\nexpected: %q\ngot: %q", expected, got) + } +} + +// BenchmarkInsertQuery-12 1952412 605.3 ns/op 1144 B/op 18 allocs/op +func BenchmarkInsertQuery(b *testing.B) { + for b.Loop() { + _ = db.User.Insert(). + Set(user.Email, "aa@aa.com"). + Set(user.Phone, 8889991234). + Set(user.FirstName, "fname"). + Set(user.LastName, "lname"). + Returning(user.ID). + String() + + } +} + +// BenchmarkInsertSetMap-12 1534039 777.1 ns/op 1480 B/op 20 allocs/op +func BenchmarkInsertSetMap(b *testing.B) { + for b.Loop() { + _ = db.User.Insert(). + SetMap(map[pgm.Field]any{ + user.Email: "aa@aa.com", + user.Phone: 8889991234, + user.FirstName: "fname", + user.LastName: "lname", + }). + Returning(user.ID). + String() + + } +} diff --git a/example/qry_select_test.go b/example/qry_select_test.go new file mode 100644 index 0000000..5c4b9b3 --- /dev/null +++ b/example/qry_select_test.go @@ -0,0 +1,90 @@ +package example + +import ( + "testing" + + "code.patial.tech/go/pgm" + "code.patial.tech/go/pgm/example/db" + "code.patial.tech/go/pgm/example/db/branchuser" + "code.patial.tech/go/pgm/example/db/employee" + "code.patial.tech/go/pgm/example/db/user" + "code.patial.tech/go/pgm/example/db/usersession" +) + +func TestQryBuilder2(t *testing.T) { + got := db.User.Debug().Select(user.Email, user.FirstName). + Join(db.UserSession, user.ID, usersession.UserID). + Join(db.BranchUser, user.ID, branchuser.UserID). + Where( + user.ID.Eq(1), + pgm.Or( + user.StatusID.Eq(2), + user.UpdatedAt.Eq(3), + ), + user.MfaKind.Eq(4), + pgm.Or( + user.FirstName.Eq(5), + user.MiddleName.Eq(6), + ), + ). + Where( + user.LastName.NEq(7), + user.Phone.Like("%123%"), + user.Email.NotInSubQuery(db.User.Select(user.ID).Where(user.ID.Eq(123))), + ). + Limit(10). + Offset(100). 
+ String() + + expected := "SELECT users.email, users.first_name FROM users JOIN user_sessions ON users.id = user_sessions.user_id" + + " JOIN branch_users ON users.id = branch_users.user_id WHERE users.id = $1 AND (users.status_id = $2 OR users.updated_at = $3)" + + " AND users.mfa_kind = $4 AND (users.first_name = $5 OR users.middle_name = $6) AND users.last_name != $7 AND users.phone" + + " LIKE $8 AND users.email NOT IN(SELECT users.id FROM users WHERE users.id = $9) LIMIT 10 OFFSET 100" + if expected != got { + t.Errorf("\nexpected: %q\ngot: %q", expected, got) + } +} + +func TestSelectWithHaving(t *testing.T) { + expected := "SELECT employees.department, AVG(employees.salary), COUNT(employees.id)" + + " FROM employees GROUP BY employees.department HAVING AVG(employees.salary) > $1 AND COUNT(employees.id) > $2" + got := db.Employee. + Select(employee.Department, employee.Salary.Avg(), employee.ID.Count()). + GroupBy(employee.Department). + Having(employee.Salary.Avg().Gt(50000), employee.ID.Count().Gt(5)). + String() + + if expected != got { + t.Errorf("\nexpected: %q\ngot: %q", expected, got) + } +} + +// BenchmarkSelect-12 668817 1753 ns/op 4442 B/op 59 allocs/op +// BenchmarkSelect-12 638901 1860 ns/op 4266 B/op 61 allocs/op +func BenchmarkSelect(b *testing.B) { + for b.Loop() { + _ = db.User.Select(user.Email, user.FirstName). + Join(db.UserSession, user.ID, usersession.UserID). + Join(db.BranchUser, user.ID, branchuser.UserID). + Where( + user.ID.Eq(1), + pgm.Or( + user.StatusID.Eq(2), + user.UpdatedAt.Eq(3), + ), + user.MfaKind.Eq(4), + pgm.Or( + user.FirstName.Eq(5), + user.MiddleName.Eq(6), + ), + ). + Where( + user.LastName.NEq(7), + user.Phone.Like("%123%"), + user.Email.NotInSubQuery(db.User.Select(user.ID).Where(user.ID.Eq(123))), + ). + Limit(10). + Offset(100). + String() + } +} diff --git a/example/qry_update_test.go b/example/qry_update_test.go new file mode 100644 index 0000000..df44e72 --- /dev/null +++ b/example/qry_update_test.go @@ -0,0 +1,66 @@ +package example + +import ( + "testing" + + "code.patial.tech/go/pgm" + "code.patial.tech/go/pgm/example/db" + "code.patial.tech/go/pgm/example/db/user" +) + +func TestUpdateQuery(t *testing.T) { + got := db.User.Update(). + Set(user.FirstName, "ankit"). + Set(user.MiddleName, "singh"). + Set(user.LastName, "patial"). + Where( + user.Email.Eq("aa@aa.com"), + ). + Where( + user.StatusID.NEq(1), + ). + String() + + expected := "UPDATE users SET first_name=$1, middle_name=$2, last_name=$3 WHERE users.email = $4 AND users.status_id != $5" + if got != expected { + t.Errorf("\nexpected: %q\ngot: %q", expected, got) + } +} + +func TestUpdateSetMap(t *testing.T) { + got := db.User.Update(). + SetMap(map[pgm.Field]any{ + user.FirstName: "ankit", + user.MiddleName: "singh", + user.LastName: "patial", + }). + Where( + user.Email.Eq("aa@aa.com"), + ). + Where( + user.StatusID.NEq(1), + ). + String() + + expected := "UPDATE users SET first_name=$1, middle_name=$2, last_name=$3 WHERE users.email = $4 AND users.status_id != $5" + if got != expected { + t.Errorf("\nexpected: %q\ngot: %q", expected, got) + } +} + +// BenchmarkUpdateQuery-12 2004985 592.2 ns/op 1176 B/op 20 allocs/op +func BenchmarkUpdateQuery(b *testing.B) { + for b.Loop() { + _ = db.User.Update(). + Set(user.FirstName, "ankit"). + Set(user.MiddleName, "singh"). + Set(user.LastName, "patial"). + Where( + user.Email.Eq("aa@aa.com"), + ). + Where( + user.StatusID.NEq(1), + ). 
+ String() + } +} diff --git a/example/schema.sql b/example/schema.sql new file mode 100644 index 0000000..8e2f9fd --- /dev/null +++ b/example/schema.sql @@ -0,0 +1,306 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 15.3 +-- Dumped by pg_dump version 15.3 + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: myapp_development; Type: DATABASE; Schema: -; Owner: postgres +-- + +CREATE DATABASE myapp_development WITH TEMPLATE = template0 ENCODING = 'UTF8' LOCALE = 'en_US.UTF-8'; + +ALTER DATABASE myapp_development OWNER TO postgres; + +\connect myapp_development + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: schema_migrations; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.schema_migrations ( + version character varying(255) NOT NULL +); + +ALTER TABLE public.schema_migrations OWNER TO postgres; + +-- +-- Name: users; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.users ( + id integer NOT NULL, + name character varying(255) NOT NULL, + email character varying(255) NOT NULL, + phone character varying(20), + first_name character varying(50) NOT NULL, + middle_name character varying(50), + last_name character varying(50) NOT NULL, + status_id smallint, + mfa_kind character varying(50) DEFAULT 'None'::character varying, + created_at timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP, + updated_at timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +ALTER TABLE public.users OWNER TO postgres; + + +CREATE TABLE public.user_sessions ( + id character varying NOT NULL, + created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + expires_at timestamp with time zone NOT NULL, + branch_id bigint, + user_id bigint NOT NULL, + role_id smallint, + login_ip character varying NOT NULL, +); + +CREATE TABLE public.branch_users ( + branch_id bigint NOT NULL, + user_id bigint NOT NULL, + role_id smallint NOT NULL, + created_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL, + updated_at timestamp with time zone DEFAULT CURRENT_TIMESTAMP NOT NULL +); + +-- +-- Name: users_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.users_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER TABLE public.users_id_seq OWNER TO postgres; + +-- +-- Name: users_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.users_id_seq OWNED BY public.users.id; + +-- +-- Name: posts; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.posts ( + id integer NOT NULL, + user_id integer NOT NULL, + title character varying(255) NOT NULL, + content text, + created_at timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +ALTER TABLE public.posts OWNER TO postgres; + 
+-- +-- Name: posts_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.posts_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER TABLE public.posts_id_seq OWNER TO postgres; + +-- +-- Name: posts_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.posts_id_seq OWNED BY public.posts.id; + +-- +-- Name: comments; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.comments ( + id integer NOT NULL, + post_id integer NOT NULL, + user_id integer NOT NULL, + content text NOT NULL, + created_at timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP +); + +ALTER TABLE public.comments OWNER TO postgres; + +CREATE TABLE public.employees ( + id integer NOT NULL, + name var NOT NULL, + department var NOT NULL, + salary decimal(10,2) , + created_at timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP + updated_at timestamp without time zone NOT NULL DEFAULT CURRENT_TIMESTAMP +); +-- +-- Name: comments_id_seq; Type: SEQUENCE; Schema: public; Owner: postgres +-- + +CREATE SEQUENCE public.comments_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + +ALTER TABLE public.comments_id_seq OWNER TO postgres; + +-- +-- Name: comments_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: postgres +-- + +ALTER SEQUENCE public.comments_id_seq OWNED BY public.comments.id; + +-- +-- Name: users id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users ALTER COLUMN id SET DEFAULT nextval('public.users_id_seq'::regclass); + +-- +-- Name: posts id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts ALTER COLUMN id SET DEFAULT nextval('public.posts_id_seq'::regclass); + +-- +-- Name: comments id; Type: DEFAULT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.comments ALTER COLUMN id SET DEFAULT nextval('public.comments_id_seq'::regclass); + +-- +-- Name: schema_migrations schema_migrations_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.schema_migrations + ADD CONSTRAINT schema_migrations_pkey PRIMARY KEY (version); + +-- +-- Name: users users_email_key; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_email_key UNIQUE (email); + +-- +-- Name: users users_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.users + ADD CONSTRAINT users_pkey PRIMARY KEY (id); + +-- +-- Name: posts posts_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts + ADD CONSTRAINT posts_pkey PRIMARY KEY (id); + +-- +-- Name: comments comments_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.comments + ADD CONSTRAINT comments_pkey PRIMARY KEY (id); + +-- +-- Name: idx_comments_post_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_comments_post_id ON public.comments USING btree (post_id); + +-- +-- Name: idx_comments_user_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_comments_user_id ON public.comments USING btree (user_id); + +-- +-- Name: idx_posts_user_id; Type: INDEX; Schema: public; Owner: postgres +-- + +CREATE INDEX idx_posts_user_id ON public.posts USING btree (user_id); + +-- +-- Name: posts posts_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.posts + 
ADD CONSTRAINT posts_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + +-- +-- Name: comments comments_post_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.comments + ADD CONSTRAINT comments_post_id_fkey FOREIGN KEY (post_id) REFERENCES public.posts(id) ON DELETE CASCADE; + +-- +-- Name: comments comments_user_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.comments + ADD CONSTRAINT comments_user_id_fkey FOREIGN KEY (user_id) REFERENCES public.users(id) ON DELETE CASCADE; + +-- +-- Data for Name: schema_migrations; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +COPY public.schema_migrations (version) FROM stdin; +20200227231541 +20200227231542 +20200227231543 +\. + +-- +-- Data for Name: users; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +COPY public.users (id, name, email, created_at) FROM stdin; +\. + +-- +-- Data for Name: posts; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +COPY public.posts (id, user_id, title, content, created_at) FROM stdin; +\. + +-- +-- Data for Name: comments; Type: TABLE DATA; Schema: public; Owner: postgres +-- + +COPY public.comments (id, post_id, user_id, content, created_at) diff --git a/go.mod b/go.mod new file mode 100644 index 0000000..bb02429 --- /dev/null +++ b/go.mod @@ -0,0 +1,21 @@ +module code.patial.tech/go/pgm + +go 1.24.5 + +require ( + github.com/jackc/pgx v3.6.2+incompatible + golang.org/x/text v0.27.0 +) + +require ( + github.com/jackc/pgpassfile v1.0.0 // indirect + github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect + github.com/jackc/puddle/v2 v2.2.2 // indirect + golang.org/x/sync v0.16.0 // indirect +) + +require ( + github.com/jackc/pgx/v5 v5.7.5 + github.com/pkg/errors v0.9.1 // indirect + golang.org/x/crypto v0.40.0 // indirect +) diff --git a/go.sum b/go.sum new file mode 100644 index 0000000..1ac51ac --- /dev/null +++ b/go.sum @@ -0,0 +1,25 @@ +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= +github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= +github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= +github.com/jackc/pgx v3.6.2+incompatible h1:2zP5OD7kiyR3xzRYMhOcXVvkDZsImVXfj+yIyTQf3/o= +github.com/jackc/pgx v3.6.2+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I= +github.com/jackc/pgx/v5 v5.7.5 h1:JHGfMnQY+IEtGM63d+NGMjoRpysB2JBwDr5fsngwmJs= +github.com/jackc/pgx/v5 v5.7.5/go.mod h1:aruU7o91Tc2q2cFp5h4uP3f6ztExVpyVv88Xl/8Vl8M= +github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= +github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.7.0/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +golang.org/x/crypto v0.40.0 h1:r4x+VvoG5Fm+eJcxMaY8CQM7Lb0l1lsmjGBQ6s8BfKM= +golang.org/x/crypto v0.40.0/go.mod h1:Qr1vMER5WyS2dfPHAlsOj01wgLbsyWtFn/aY+5+ZdxY= +golang.org/x/sync v0.16.0 h1:ycBJEhp9p4vXvUZNszeOq0kGTPghopOL8q0fq3vstxw= +golang.org/x/sync v0.16.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= +golang.org/x/text v0.27.0 h1:4fGWRpyh641NLlecmyl4LOe6yDdfaYNrGb2zdfo4JV4= +golang.org/x/text v0.27.0/go.mod h1:1D28KMCvyooCX9hBiosv5Tz/+YLxj0j7XhWjpSUF7CU= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git a/pgm.go b/pgm.go new file mode 100644 index 0000000..ead183a --- /dev/null +++ b/pgm.go @@ -0,0 +1,134 @@ +// Patial Tech. +// Author, Ankit Patial + +package pgm + +import ( + "errors" + "strings" + "time" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgtype" +) + +// Table in database +type Table struct { + Name string + PK []string + FieldCount uint16 + debug bool +} + +// Debug when set true will print generated query string in stdout +func (t Table) Debug() Clause { + t.debug = true + return t +} + +// +// Field ==> +// + +// Field related to a table +type Field string + +func (f Field) Name() string { + return strings.Split(string(f), ".")[1] +} + +func (f Field) String() string { + return string(f) +} + +// Count fn wrapping of field +func (f Field) Count() Field { + return Field("COUNT(" + f.String() + ")") +} + +// Avg fn wrapping of field +func (f Field) Avg() Field { + return Field("AVG(" + f.String() + ")") +} + +func (f Field) Eq(val any) Conditioner { + col := f.String() + return &Cond{Field: col, Val: val, op: " = $", len: len(col) + 5} +} + +// EqualFold will user LOWER() for comparision +func (f Field) EqFold(val any) Conditioner { + col := f.String() + return &Cond{Field: "LOWER(" + col + ")", Val: val, op: " = LOWER($", action: CondActionNeedToClose, len: len(col) + 5} +} + +func (f Field) NEq(val any) Conditioner { + col := f.String() + return &Cond{Field: col, Val: val, op: " != $", len: len(col) + 5} +} + +func (f Field) Gt(val any) Conditioner { + col := f.String() + return &Cond{Field: col, Val: val, op: " > $", len: len(col) + 5} +} + +func (f Field) Gte(val any) Conditioner { + col := f.String() + return &Cond{Field: col, Val: val, op: " >= $", len: len(col) + 5} +} + +func (f Field) Like(val string) Conditioner { + col := f.String() + return &Cond{Field: col, Val: val, op: " LIKE $", len: len(f.String()) + 5} +} + +func (f Field) LikeFold(val string) Conditioner { + col := f.String() + return &Cond{Field: "LOWER(" + col + ")", Val: val, op: " LIKE LOWER($", action: CondActionNeedToClose, len: len(col) + 5} +} + +// ILIKE is case-insensitive +func (f Field) ILike(val string) Conditioner { + col := f.String() + return &Cond{Field: col, Val: val, op: " ILIKE $", len: len(col) + 5} +} + +func (f Field) NotIn(val ...any) Conditioner { + col := f.String() + return &Cond{Field: col, Val: val, op: " NOT IN($", action: CondActionNeedToClose, len: len(col) + 5} +} + +func (f Field) NotInSubQuery(qry WhereClause) Conditioner { + col := f.String() + return &Cond{Field: col, Val: qry, op: " NOT IN($)", action: CondActionSubQuery} +} + +// +// Helper func ==> +// + +// PgTime as in UTC +func PgTime(t time.Time) pgtype.Timestamptz { + return pgtype.Timestamptz{Time: t, Valid: true} +} + +func PgTimeNow() pgtype.Timestamptz { + 
return pgtype.Timestamptz{Time: time.Now(), Valid: true} +} + +// IsNotFound error check +func IsNotFound(err error) bool { + return errors.Is(err, pgx.ErrNoRows) +} + +func ConcatWs(sep string, fields ...Field) string { + return "concat_ws('" + sep + "'," + joinFileds(fields) + ")" +} + +func StringAgg(exp, sep string) string { + return "string_agg(" + exp + ",'" + sep + "')" +} + +func StringAggCast(exp, sep string) string { + return "string_agg(cast(" + exp + " as varchar),'" + sep + "')" +} diff --git a/pool.go b/pool.go new file mode 100644 index 0000000..38624ea --- /dev/null +++ b/pool.go @@ -0,0 +1,98 @@ +// Patial Tech. +// Author, Ankit Patial + +package pgm + +import ( + "context" + "errors" + "log/slog" + "strings" + "sync" + "sync/atomic" + "time" + + "github.com/jackc/pgx/v5" + "github.com/jackc/pgx/v5/pgxpool" +) + +var ( + poolPGX atomic.Pointer[pgxpool.Pool] + poolStringBuilder = sync.Pool{ + New: func() any { + return new(strings.Builder) + }, + } + + ErrInitTX = errors.New("failed to init db.tx") + ErrCommitTX = errors.New("failed to commit db.tx") + ErrNoRows = errors.New("no data found") +) + +type Config struct { + MaxConns int32 + MinConns int32 + MaxConnLifetime time.Duration + MaxConnIdleTime time.Duration +} + +func Init(connString string, conf *Config) { + cfg, err := pgxpool.ParseConfig(connString) + if err != nil { + panic(err) + } + + if conf != nil { + if conf.MaxConns > 0 { + cfg.MaxConns = conf.MaxConns // 100 + } + + if conf.MinConns > 0 { + cfg.MinConns = conf.MaxConns // 5 + } + + if conf.MaxConnLifetime > 0 { + cfg.MaxConnLifetime = conf.MaxConnLifetime // time.Minute * 10 + } + + if conf.MaxConnIdleTime > 0 { + cfg.MaxConnIdleTime = conf.MaxConnIdleTime // time.Minute * 5 + } + } + + p, err := pgxpool.NewWithConfig(context.Background(), cfg) + if err != nil { + panic(err) + } + + if err = p.Ping(context.Background()); err != nil { + panic(err) + } + + poolPGX.Store(p) +} + +func GetPool() *pgxpool.Pool { + return poolPGX.Load() +} + +// get string builder from pool +func getSB() *strings.Builder { + return poolStringBuilder.Get().(*strings.Builder) +} + +// put string builder back to pool +func putSB(sb *strings.Builder) { + sb.Reset() + poolStringBuilder.Put(sb) +} + +func BeginTx(ctx context.Context) (pgx.Tx, error) { + tx, err := poolPGX.Load().Begin(ctx) + if err != nil { + slog.Error(err.Error()) + return nil, errors.New("failed to open db tx") + } + + return tx, err +} diff --git a/qry.go b/qry.go new file mode 100644 index 0000000..0203a48 --- /dev/null +++ b/qry.go @@ -0,0 +1,240 @@ +// Patial Tech. 
+// Author, Ankit Patial + +package pgm + +import ( + "context" + "strconv" + "strings" + + "github.com/jackc/pgx/v5" +) + +type ( + Clause interface { + Select(fields ...Field) SelectClause + // Insert() InsertSet + // Update() UpdateSet + // Delete() WhereOrExec + } + + SelectClause interface { + // Join and Inner Join are same + Join(m Table, t1Field, t2Field Field) SelectClause + LeftJoin(m Table, t1Field, t2Field Field) SelectClause + RightJoin(m Table, t1Field, t2Field Field) SelectClause + FullJoin(m Table, t1Field, t2Field Field) SelectClause + CrossJoin(m Table) SelectClause + WhereClause + OrderByClause + GroupByClause + LimitClause + OffsetClause + Query + raw(prefixArgs []any) (string, []any) + } + + WhereClause interface { + Where(cond ...Conditioner) AfterWhere + } + + AfterWhere interface { + WhereClause + GroupByClause + OrderByClause + LimitClause + OffsetClause + Query + } + + GroupByClause interface { + GroupBy(fields ...Field) AfterGroupBy + } + + AfterGroupBy interface { + HavinClause + OrderByClause + LimitClause + OffsetClause + Query + } + + HavinClause interface { + Having(cond ...Conditioner) AfterHaving + } + + AfterHaving interface { + OrderByClause + LimitClause + OffsetClause + Query + } + + OrderByClause interface { + OrderBy(fields ...Field) AfterOrderBy + } + + AfterOrderBy interface { + LimitClause + OffsetClause + Query + } + + LimitClause interface { + Limit(v int) AfterLimit + } + + AfterLimit interface { + OffsetClause + Query + } + + OffsetClause interface { + Offset(v int) AfterOffset + } + + AfterOffset interface { + LimitClause + Query + } + + Conditioner interface { + Condition(args *[]any, idx int) string + } + + Insert interface { + Set(field Field, val any) InsertClause + SetMap(fields map[Field]any) InsertClause + } + + InsertClause interface { + Insert + Returning(field Field) First + OnConflict(fields ...Field) Do + Execute + Stringer + } + + Do interface { + DoNothing() Execute + DoUpdate(fields ...Field) Execute + } + + Update interface { + Set(field Field, val any) UpdateClause + SetMap(fields map[Field]any) UpdateClause + } + + UpdateClause interface { + Update + Where(cond ...Conditioner) WhereOrExec + } + + WhereOrExec interface { + Where(cond ...Conditioner) WhereOrExec + Execute + } + + Query interface { + First + All + Stringer + } + + First interface { + First(ctx context.Context, dest ...any) error + FirstTx(ctx context.Context, tx pgx.Tx, dest ...any) error + Stringer + } + + All interface { + // Query rows + // + // don't forget to close() rows + All(ctx context.Context, rows RowsCb) error + // Query rows + // + // don't forget to close() rows + AllTx(ctx context.Context, tx pgx.Tx, rows RowsCb) error + } + + Execute interface { + Exec(ctx context.Context) error + ExecTx(ctx context.Context, tx pgx.Tx) error + Stringer + } + + Stringer interface { + String() string + } + + RowScanner interface { + Scan(dest ...any) error + } + + RowsCb func(row RowScanner) error +) + +func joinFileds(fields []Field) string { + sb := getSB() + defer putSB(sb) + for i, f := range fields { + if i == 0 { + sb.WriteString(f.String()) + } else { + sb.WriteString(", ") + sb.WriteString(f.String()) + } + } + + return sb.String() +} + +func And(cond ...Conditioner) Conditioner { + return &CondGroup{op: " AND ", cond: cond} +} + +func Or(cond ...Conditioner) Conditioner { + return &CondGroup{op: " OR ", cond: cond} +} + +func (cv *Cond) Condition(args *[]any, argIdx int) string { + // 1. 
condition with subquery + if cv.action == CondActionSubQuery { + qStr, newArgs := cv.Val.(SelectClause).raw(*args) + *args = newArgs + return cv.Field + strings.Replace(cv.op, "$", qStr, 1) + } + + // 2. normal condition + *args = append(*args, cv.Val) + var op string + if strings.HasSuffix(cv.op, "$") { + op = cv.op + strconv.Itoa(argIdx+1) + } else { + op = strings.Replace(cv.op, "$", "$"+strconv.Itoa(argIdx+1), 1) + } + + if cv.action == CondActionNeedToClose { + return cv.Field + op + ")" + } + return cv.Field + op +} + +func (c *CondGroup) Condition(args *[]any, argIdx int) string { + sb := getSB() + defer putSB(sb) + + sb.WriteString("(") + for i, cond := range c.cond { + if i == 0 { + sb.WriteString(cond.Condition(args, argIdx+i)) + } else { + sb.WriteString(c.op) + sb.WriteString(cond.Condition(args, argIdx+i)) + } + } + sb.WriteString(")") + return sb.String() +} diff --git a/qry_delete.go b/qry_delete.go new file mode 100644 index 0000000..47c7fc8 --- /dev/null +++ b/qry_delete.go @@ -0,0 +1,74 @@ +package pgm + +import ( + "context" + + "github.com/jackc/pgx/v5" +) + +type ( + deleteQry struct { + table string + condition []Conditioner + args []any + debug bool + } +) + +func (t *Table) Delete() WhereOrExec { + qb := &deleteQry{ + table: t.Name, + debug: t.debug, + } + + return qb +} + +func (q *deleteQry) Where(cond ...Conditioner) WhereOrExec { + q.condition = append(q.condition, cond...) + return q +} + +func (q *deleteQry) Exec(ctx context.Context) error { + _, err := poolPGX.Load().Exec(ctx, q.String(), q.args...) + if err != nil { + return err + } + return nil +} + +func (q *deleteQry) ExecTx(ctx context.Context, tx pgx.Tx) error { + _, err := tx.Exec(ctx, q.String(), q.args...) + if err != nil { + return err + } + + return nil +} + +func (q *deleteQry) String() string { + sb := getSB() + defer putSB(sb) + + n := len("DELETE FROM ") + len(q.table) + 20 + sb.Grow(n) + + // DELETE + sb.WriteString("DELETE FROM ") + sb.WriteString(q.table) + + // WHERE + if len(q.condition) > 0 { + sb.WriteString(" WHERE ") + var argIdx int + for i, c := range q.condition { + argIdx = len(q.args) + if i > 0 { + sb.WriteString(" AND ") + } + sb.WriteString(c.Condition(&q.args, argIdx)) + } + } + + return sb.String() +} diff --git a/qry_insert.go b/qry_insert.go new file mode 100644 index 0000000..8fb4ed8 --- /dev/null +++ b/qry_insert.go @@ -0,0 +1,150 @@ +// Patial Tech. 
+// Author, Ankit Patial + +package pgm + +import ( + "context" + "fmt" + "strconv" + "strings" + + "github.com/jackc/pgx/v5" +) + +type insertQry struct { + returing *string + onConflict *string + + table string + conflictAction string + + fields []string + vals []string + args []any + debug bool +} + +func (t *Table) Insert() Insert { + qb := &insertQry{ + table: t.Name, + fields: make([]string, 0, t.FieldCount), + vals: make([]string, 0, t.FieldCount), + args: make([]any, 0, t.FieldCount), + debug: t.debug, + } + return qb +} + +func (q *insertQry) Set(field Field, val any) InsertClause { + q.fields = append(q.fields, field.Name()) + q.vals = append(q.vals, "$"+strconv.Itoa(len(q.args)+1)) + q.args = append(q.args, val) + return q +} + +func (q *insertQry) SetMap(cols map[Field]any) InsertClause { + for k, v := range cols { + q.Set(k, v) + } + return q +} + +func (q *insertQry) Returning(field Field) First { + col := field.Name() + q.returing = &col + return q +} + +func (q *insertQry) OnConflict(fields ...Field) Do { + if len(fields) > 0 { + sb := getSB() + defer putSB(sb) + + for i, f := range fields { + if i == 0 { + sb.WriteString(f.Name()) + } else { + sb.WriteString(", " + f.Name()) + } + } + c := sb.String() + q.onConflict = &c + } + return q +} + +func (q *insertQry) DoNothing() Execute { + q.conflictAction = "DO NOTHING" + return q +} + +func (q *insertQry) DoUpdate(fields ...Field) Execute { + var sb strings.Builder + for i, f := range fields { + col := f.Name() + if i == 0 { + fmt.Fprintf(&sb, "%s = EXCLUDED.%s", col, col) + } else { + fmt.Fprintf(&sb, ", %s = EXCLUDED.%s", col, col) + } + } + + q.conflictAction = "DO UPDATE SET " + sb.String() + return q +} + +func (q *insertQry) Exec(ctx context.Context) error { + _, err := poolPGX.Load().Exec(ctx, q.String(), q.args...) + if err != nil { + return err + } + return nil +} + +func (q *insertQry) ExecTx(ctx context.Context, tx pgx.Tx) error { + _, err := tx.Exec(ctx, q.String(), q.args...) + if err != nil { + return err + } + + return nil +} + +func (q *insertQry) First(ctx context.Context, dest ...any) error { + return poolPGX.Load().QueryRow(ctx, q.String(), q.args...).Scan(dest...) +} + +func (q *insertQry) FirstTx(ctx context.Context, tx pgx.Tx, dest ...any) error { + return tx.QueryRow(ctx, q.String(), q.args...).Scan(dest...) +} + +// build query string +func (q *insertQry) String() string { + sb := getSB() + defer putSB(sb) + + n := 12 + len(q.table) + 10 + for i, c := range q.fields { + n += len(c) + len(" =$,"+strconv.Itoa(i)) + } + + sb.Grow(n) + sb.WriteString("INSERT INTO ") + sb.WriteString(q.table) + sb.WriteString("(") + sb.WriteString(strings.Join(q.fields, ", ")) + sb.WriteString(") VALUES(") + sb.WriteString(strings.Join(q.vals, ", ")) + sb.WriteString(")") + + if q.onConflict != nil { + sb.WriteString(" ON CONFLICT (" + *q.onConflict + ") " + q.conflictAction) + } + + if q.returing != nil { + sb.WriteString(" RETURNING " + *q.returing) + } + + return sb.String() +} diff --git a/qry_select.go b/qry_select.go new file mode 100644 index 0000000..e73b6ca --- /dev/null +++ b/qry_select.go @@ -0,0 +1,286 @@ +// Patial Tech. 
+// Author, Ankit Patial + +package pgm + +import ( + "context" + "errors" + "fmt" + "strconv" + "strings" + + "github.com/jackc/pgx/v5" +) + +type ( + selectQry struct { + table string + fields []Field + args []any + join []string + where []Conditioner + groupBy []Field + having []Conditioner + orderBy []Field + limit int + offset int + debug bool + } + + CondAction uint8 + + Cond struct { + Val any + op string + Field string + len int + action CondAction + } + + CondGroup struct { + op string + cond []Conditioner + } +) + +// Contdition actions +const ( + CondActionNothing CondAction = iota + CondActionNeedToClose + CondActionSubQuery +) + +// Select clause +func (t Table) Select(field ...Field) SelectClause { + qb := &selectQry{ + table: t.Name, + debug: t.debug, + fields: field, + } + + return qb +} + +func (q *selectQry) Join(t Table, t1Field, t2Field Field) SelectClause { + q.join = append(q.join, "JOIN "+t.Name+" ON "+t1Field.String()+" = "+t2Field.String()) + return q +} + +func (q *selectQry) LeftJoin(t Table, t1Field, t2Field Field) SelectClause { + q.join = append(q.join, "LEFT JOIN "+t.Name+" ON "+t1Field.String()+" = "+t2Field.String()) + return q +} + +func (q *selectQry) RightJoin(t Table, t1Field, t2Field Field) SelectClause { + q.join = append(q.join, "RIGHT JOIN "+t.Name+" ON "+t1Field.String()+" = "+t2Field.String()) + return q +} + +func (q *selectQry) FullJoin(t Table, t1Field, t2Field Field) SelectClause { + q.join = append(q.join, "FULL JOIN "+t.Name+" ON "+t1Field.String()+" = "+t2Field.String()) + return q +} + +func (q *selectQry) CrossJoin(t Table) SelectClause { + q.join = append(q.join, "CROSS JOIN "+t.Name) + return q +} + +func (q *selectQry) Where(cond ...Conditioner) AfterWhere { + q.where = append(q.where, cond...) + return q +} + +func (q *selectQry) OrderBy(fields ...Field) AfterOrderBy { + q.orderBy = fields + return q +} + +func (q *selectQry) GroupBy(fields ...Field) AfterGroupBy { + q.groupBy = fields + return q +} + +func (q *selectQry) Having(cond ...Conditioner) AfterHaving { + q.having = append(q.having, cond...) + return q +} + +func (q *selectQry) Limit(v int) AfterLimit { + q.limit = v + return q +} + +func (q *selectQry) Offset(v int) AfterOffset { + q.offset = v + return q +} + +func (q *selectQry) First(ctx context.Context, dest ...any) error { + return poolPGX.Load().QueryRow(ctx, q.String(), q.args...).Scan(dest...) +} + +func (q *selectQry) FirstTx(ctx context.Context, tx pgx.Tx, dest ...any) error { + return tx.QueryRow(ctx, q.String(), q.args...).Scan(dest...) +} + +func (q *selectQry) All(ctx context.Context, row RowsCb) error { + rows, err := poolPGX.Load().Query(ctx, q.String(), q.args...) + if errors.Is(err, pgx.ErrNoRows) { + return ErrNoRows + } + defer rows.Close() + + for rows.Next() { + if err := row(rows); err != nil { + return err + } + } + + return nil +} + +func (q *selectQry) AllTx(ctx context.Context, tx pgx.Tx, row RowsCb) error { + rows, err := tx.Query(ctx, q.String(), q.args...) + if errors.Is(err, pgx.ErrNoRows) { + return ErrNoRows + } + defer rows.Close() + + for rows.Next() { + if err := row(rows); err != nil { + return err + } + } + + return nil +} + +func (q *selectQry) raw(prefixArgs []any) (string, []any) { + q.args = append(prefixArgs, q.args...) 
+ return q.String(), q.args +} + +func (q *selectQry) String() string { + sb := getSB() + defer putSB(sb) + + sb.Grow(q.averageLen()) + + // SELECT + sb.WriteString("SELECT ") + sb.WriteString(joinFileds(q.fields)) + sb.WriteString(" FROM " + q.table) + + // JOIN + if len(q.join) > 0 { + sb.WriteString(" " + strings.Join(q.join, " ")) + } + + // WHERE + if len(q.where) > 0 { + sb.WriteString(" WHERE ") + var argIdx int + for i, c := range q.where { + argIdx = len(q.args) + if i > 0 { + sb.WriteString(" AND ") + } + sb.WriteString(c.Condition(&q.args, argIdx)) + } + } + + // GROUP BY + if len(q.groupBy) > 0 { + sb.WriteString(" GROUP BY ") + sb.WriteString(joinFileds(q.groupBy)) + } + + // HAVING + if len(q.having) > 0 { + sb.WriteString(" HAVING ") + var argIdx int + for i, c := range q.having { + argIdx = len(q.args) + if i > 0 { + sb.WriteString(" AND ") + } + sb.WriteString(c.Condition(&q.args, argIdx)) + } + } + + // ORDER BY + if len(q.orderBy) > 0 { + sb.WriteString(" ORDER BY ") + sb.WriteString(joinFileds(q.orderBy)) + } + + // LIMIT + if q.limit > 0 { + sb.WriteString(" LIMIT ") + sb.WriteString(strconv.Itoa(q.limit)) + } + + // OFFSET + if q.offset > 0 { + sb.WriteString(" OFFSET ") + sb.WriteString(strconv.Itoa(q.offset)) + } + + qry := sb.String() + if q.debug { + fmt.Println("***") + fmt.Println(qry) + fmt.Printf("%+v\n", q.args) + fmt.Println("***") + } + return qry +} + +func (q *selectQry) averageLen() int { + n := 12 + len(q.table) // SELECT FROM + for _, c := range q.fields { + n += (len(c) + 2) * len(q.table) // columns with tablename.columnname + } + + // JOIN + if len(q.join) > 0 { + for _, c := range q.join { + n += len(c) + 2 // with whitespace + } + } + + // WHERE + if len(q.where) > 0 { + n += 7 + len(q.args)*5 // WHERE with 2 sapces and each args roughly with min of 5 char + } + + // GROUP BY + if len(q.groupBy) > 0 { + n += 10 // GROUP BY + for _, c := range q.groupBy { + n += len(c) + 2 // one command and a whitespace + } + } + + // ORDER BY + if len(q.orderBy) > 0 { + n += 10 // ORDER BY + for _, c := range q.orderBy { + n += len(c) + 2 // one command and a whitespace + } + } + + // LIMIT + if q.limit > 0 { + n += 10 // LIMIT + } + + // OFFSET + if q.offset > 0 { + n += 10 // OFFSET + } + + return n +} diff --git a/qry_update.go b/qry_update.go new file mode 100644 index 0000000..47def29 --- /dev/null +++ b/qry_update.go @@ -0,0 +1,100 @@ +// Patial Tech. +// Author, Ankit Patial + +package pgm + +import ( + "context" + "strconv" + "strings" + + "github.com/jackc/pgx/v5" +) + +type updateQry struct { + table string + cols []string + condition []Conditioner + args []any + debug bool +} + +func (t *Table) Update() Update { + qb := &updateQry{ + table: t.Name, + debug: t.debug, + cols: make([]string, 0, t.FieldCount), + args: make([]any, 0, t.FieldCount), + } + return qb +} + +func (q *updateQry) Set(field Field, val any) UpdateClause { + col := field.Name() + q.cols = append(q.cols, col+"=$"+strconv.Itoa(len(q.args)+1)) + q.args = append(q.args, val) + return q +} + +func (q *updateQry) SetMap(cols map[Field]any) UpdateClause { + for k, v := range cols { + q.Set(k, v) + } + return q +} + +func (q *updateQry) Where(cond ...Conditioner) WhereOrExec { + q.condition = append(q.condition, cond...) + return q +} + +func (q *updateQry) Exec(ctx context.Context) error { + _, err := poolPGX.Load().Exec(ctx, q.String(), q.args...) 
+ if err != nil { + return err + } + + return nil +} + +func (q *updateQry) ExecTx(ctx context.Context, tx pgx.Tx) error { + _, err := tx.Exec(ctx, q.String(), q.args...) + if err != nil { + return err + } + + return nil +} + +func (q *updateQry) String() string { + sb := getSB() + defer putSB(sb) + + n := 7 + len(q.table) + 5 // "UPDATE q.table SET + for _, col := range q.cols { + n += len(col) + 5 + } + if len(q.condition) > 0 { + n += 7 + len(q.condition)*5 // WHERE with condition, 5 is just avg small min val + } + sb.Grow(n) + + // UPDATE + sb.WriteString("UPDATE " + q.table + " SET ") + sb.WriteString(strings.Join(q.cols, ", ")) + + // WHERE + if len(q.condition) > 0 { + sb.WriteString(" WHERE ") + var argIdx int + for i, c := range q.condition { + argIdx = len(q.args) + if i > 0 { + sb.WriteString(" AND ") + } + sb.WriteString(c.Condition(&q.args, argIdx)) + } + } + + return sb.String() +}
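
For orientation, the sketch below shows how the pieces introduced in this patch fit together: the generator (run via the Makefile's `go run ./cmd -o ./example/db ./example/schema.sql`) emits the `db` and `db/user` packages, and the `pgm` builder then composes parameterized queries against them. This is a minimal usage sketch based only on the APIs visible in this diff; the DATABASE_URL environment variable and the choice of columns scanned are illustrative placeholders, not part of the patch.

package main

import (
	"context"
	"fmt"
	"log"
	"os"

	"code.patial.tech/go/pgm"
	"code.patial.tech/go/pgm/example/db"
	"code.patial.tech/go/pgm/example/db/user"
)

func main() {
	// Initialize the shared pgxpool; Init panics if the pool cannot be created.
	// DATABASE_URL is a placeholder connection string for this sketch.
	pgm.Init(os.Getenv("DATABASE_URL"), nil)

	// Build and run: SELECT users.email, users.first_name FROM users WHERE users.id = $1
	var email, firstName string
	err := db.User.
		Select(user.Email, user.FirstName).
		Where(user.ID.Eq(1)).
		First(context.Background(), &email, &firstName)
	if pgm.IsNotFound(err) {
		fmt.Println("no such user")
		return
	}
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(email, firstName)
}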