Documentation
¶
Index ¶
- func BatchDBInserter(db, table, fields string, bufferSize int) (insert func(any), flushClose func())
- func BatchInserter(db *sql.DB, table string, fields string, bufferSize int) (insert func(any), flush func())
- func EscapeValue(val any) string
- func PostgreBatchDBInserter(dsn, table, fields string, bufferSize int) (insert func(any), flushClose func())
- func PostgreBatchInserter(db *sql.DB, table string, fields string, bufferSize int) (insert func(any), flush func())
- func SqlIterator(connection string, sql_ string, processor SqlRowProcessor)
- type SqlRow
- type SqlRowProcessor
- type SqlRows
Examples ¶
Constants ¶
This section is empty.
Variables ¶
This section is empty.
Functions ¶
func BatchDBInserter ¶
func BatchDBInserter(db, table, fields string, bufferSize int) (insert func(any), flushClose func())
BatchDBInserter creates a batch inserter with a new database connection.
Usage with auto-escaping (recommended):
connect_string := "parf:passwd@tcp(rxdb:3306)/visits_log"
insert, flush := sql.BatchDBInserter(connect_string, "tableName", "field1, field2, ...", 10000)
defer flush()
for .... {
insert([]any{val1, val2, val3, val4, val5}) // Auto-escaped
}
Usage with manual string (legacy):
for .... {
values := fmt.Sprintf("%d, %d, %d, %d, %d", ...) // You must escape
insert(values)
}
func BatchInserter ¶
func BatchInserter(db *sql.DB, table string, fields string, bufferSize int) (insert func(any), flush func())
BatchInserter creates a batch inserter for an existing database connection. It accumulates insert values and flushes them in batches when bufferSize is reached.
The insert function accepts either:
- string: values as SQL string (UNSAFE - you must escape yourself)
- slice/array: values as slice (SAFE - automatically escaped)
Example with auto-escaping:
insert([]any{1, "John's Pizza", 99.95})
insert([]string{"value1", "value2", "value3"})
insert([]int{1, 2, 3})
Example with manual escaping (backward compatible):
insert("1, 'John''s Pizza', 99.95")
Example (AutoEscape) ¶
Example showing expected usage with auto-escaping (new style - recommended)
package main
import ()
func main() {
// This would require a real database connection
// db, _ := sql.Open("mysql", "user:pass@tcp(host:3306)/dbname")
// defer db.Close()
//
// insert, flush := hbsql.BatchInserter(db, "users", "id, name, email", 1000)
// defer flush()
//
// for i := 0; i < 10000; i++ {
// // Auto-escaping - SAFE, handles quotes and special characters
// insert([]any{i, fmt.Sprintf("user%d", i), fmt.Sprintf("user%[email protected]", i)})
// }
//
// // Also works with typed slices:
// insert([]string{"value1", "value2", "value3"})
// insert([]int{1, 2, 3})
// insert([]any{1, "John's Pizza", 99.95, true, nil})
}
Example (Manual) ¶
Example showing expected usage with manual escaping (old style)
package main
import ()
func main() {
// This would require a real database connection
// db, _ := sql.Open("mysql", "user:pass@tcp(host:3306)/dbname")
// defer db.Close()
//
// insert, flush := hbsql.BatchInserter(db, "users", "id, name, email", 1000)
// defer flush()
//
// for i := 0; i < 10000; i++ {
// // Manual escaping - UNSAFE if data comes from user input
// values := fmt.Sprintf("%d, \"user%d\", \"user%d@example.com\"", i, i, i)
// insert(values)
// }
}
func EscapeValue ¶
EscapeValue converts a value to a SQL-safe string representation. Handles NULL, strings (with escaping), numbers, booleans, and other types. This function is exported so users can use it for manual escaping if needed.
func PostgreBatchDBInserter ¶
func PostgreBatchDBInserter(dsn, table, fields string, bufferSize int) (insert func(any), flushClose func())
PostgreBatchDBInserter creates a batch inserter with a new PostgreSQL connection.
Usage:
connect_string := "host=localhost port=5432 user=myuser password=mypass dbname=mydb sslmode=disable"
insert, flush := sql.PostgreBatchDBInserter(connect_string, "schema.table", "col1, col2, col3", 1000)
defer flush()
for ... {
insert([]any{val1, val2, val3})
}
func PostgreBatchInserter ¶
func PostgreBatchInserter(db *sql.DB, table string, fields string, bufferSize int) (insert func(any), flush func())
PostgreBatchInserter creates a batch inserter optimized for PostgreSQL. It's functionally identical to BatchInserter but makes the intent clear and can be extended with PostgreSQL-specific optimizations in the future.
Usage:
insert, flush := sql.PostgreBatchInserter(db, "schema.table", "col1, col2, col3", 1000)
defer flush()
for ... {
insert([]any{val1, val2, val3}) // Auto-escaped
}
func SqlIterator ¶
func SqlIterator(connection string, sql_ string, processor SqlRowProcessor)
SqlIterator iterates over SQL query and shows statistics
sample connection: "parf:mv700@tcp(hdb2:3306)/visits_log" sample sql: "SELECT FL, T, C, B, G, V, Blocked, L FROM flTCBGVL limit 10"
Example ¶
Example showing expected usage
package main
import ()
func main() {
// This would require a real database connection
// hbsql.SqlIterator("user:pass@tcp(host:3306)/db", "SELECT * FROM users LIMIT 10", func(row *sql.Rows) {
// var id int
// var name string
// row.Scan(&id, &name)
// fmt.Printf("User: %d - %s\n", id, name)
// })
}
Types ¶
type SqlRowProcessor ¶
SqlRowProcessor is a function type for processing SQL rows
type SqlRows ¶
type SqlRows []SqlRow
func WildSqlQuery ¶
WildSqlQuery executes a SQL query and returns results as a slice of maps. Each row is represented as a map with column names as keys and string values.
Example ¶
Example test showing the expected structure
package main
import (
_ "github.com/go-sql-driver/mysql"
)
func main() {
// This would require a real database connection
// db, _ := sql.Open("mysql", "user:pass@tcp(host:3306)/dbname")
// defer db.Close()
//
// rows, err := hbsql.WildSqlQuery(db, "SELECT id, name FROM users LIMIT 10")
// if err != nil {
// log.Fatal(err)
// }
//
// for _, row := range rows {
// fmt.Printf("ID: %s, Name: %s\n", row["id"], row["name"])
// }
}