package main
import (
	"database/sql"
	"flag"
	"log"

	_ "github.com/mattn/go-sqlite3"
)
func main() {
	log.Println("Hello, world!")
	dbPath := flag.String("database", "./data/trysqlite.db", "Path to a SQLite3 database")
	flag.Parse()
	// TODO figure out what query string options to be using
	fullDatabasePath := "file:" + *dbPath + "?cache=shared"
	log.Printf("Using Database: %s", fullDatabasePath)
	// db is a handle backed by a Go database/sql connection pool. It is not a connection in and of itself, nor is
	// it the actual connection pool. The first connection is made lazily by the handle, and none of the connection
	// parameters are validated at this point either. It simply prepares the abstraction for use.
	db, err := sql.Open("sqlite3", fullDatabasePath)
	if err != nil {
		log.Fatalf("Failed to open sqlite database %s: %v", fullDatabasePath, err)
	}
	// It is idiomatic to defer db.Close() if the sql.DB should not have a lifetime beyond the scope of the function.
	// Although it's idiomatic to Close() the database when you're finished with it, the sql.DB object is designed
	// to be long-lived. Don't Open() and Close() databases frequently. Instead, create one sql.DB object for each
	// distinct datastore you need to access, and keep it until the program is done accessing that datastore.
	// Pass it around as needed, or make it available somehow globally, but keep it open.
	// And don't Open() and Close() from a short-lived function. Instead, pass the sql.DB into that short-lived
	// function as an argument (see the countTodos sketch after main below).
	// If you don't treat the sql.DB as a long-lived object, you could experience problems such as poor reuse and
	// sharing of connections, running out of available network resources, or sporadic failures due to a lot of
	// TCP connections remaining in TIME_WAIT status. Such problems are signs that you're not using database/sql
	// as it was designed.
	defer db.Close()
	// If you want to check right away that the database is available and accessible, Ping it and check the error.
	err = db.Ping()
	if err != nil {
		log.Fatalf("DB Ping failed, check the database and connection parameters: %v", err)
	}
	var (
		id      int
		subject string
		todo    string
	)
	// db.Query vs db.Exec: if a function name includes Query, it is designed to ask a question of the database and
	// will return a set of rows, even if it's empty. Statements that don't return rows should not use Query
	// functions; they should use Exec() (see the Exec sketch after the row loop below).
	rows, err := db.Query("SELECT subject FROM todos")
	if err != nil {
		log.Fatal(err)
	}
	// It is important to defer closing the rows object as well. Why? It releases the memory and the network
	// connection, so it also prevents resource leaks. Be a good steward and clean up after yourself rather than
	// waiting for the garbage collector to do it.
	// As long as there's an open result set (represented by rows), the underlying connection is busy and can't be
	// used for any other query.
	// Also, never defer in a loop; defer only runs on function exit.
	defer rows.Close()
	// Iterate through each row. Internally, when rows.Next hits EOF it calls rows.Close() for you, freeing up the
	// connection resources, but you shouldn't rely on that alone. The deferred rows.Close() runs on function exit;
	// this example is a main func, so that's not very realistic, but get in the habit anyway.
	for rows.Next() {
		err = rows.Scan(&subject)
		if err != nil {
			log.Fatal(err)
		}
		log.Printf("Subject is %s", subject)
	}
	// Don't just assume that the loop iterated until you've processed all the rows. Always check rows.Err().
	err = rows.Err()
	if err != nil {
		log.Fatal(err)
	}
	// You could also clean up explicitly at this point. rows.Close() is idempotent, so calling it here on top of
	// the deferred call is harmless, but relying on the defer alone is more idiomatic.
	rows.Close()
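	// Sketch of the Exec() side mentioned above (illustrative, not part of the original flow): an INSERT returns
	// no rows, so it goes through Exec(), and the returned sql.Result exposes the last insert id and the number of
	// affected rows. This assumes the todos table auto-assigns id and has subject/todo text columns, matching the
	// queries in this file; note that it adds a row on every run.
	res, err := db.Exec("INSERT INTO todos (subject, todo) VALUES (?, ?)", "example", "added by the Exec sketch")
	if err != nil {
		log.Fatal(err)
	}
	lastID, err := res.LastInsertId()
	if err != nil {
		log.Print(err.Error())
	} else {
		log.Printf("Exec inserted a todo with id %d", lastID)
	}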
	// Never build a statement by concatenating user input into the SQL string you run, unless you like SQL
	// injection attacks; use placeholder parameters instead.
	// QueryRow expects at most one row. If there is no row, it delays returning the error until Scan is called.
	row := db.QueryRow("SELECT id, subject, todo FROM todos WHERE id = ?", 1)
	// Scan does the heavy lifting for you when converting from database types to Go types; it converts based on
	// the type of each destination variable. Conversion failures are, of course, returned as part of err.
	err = row.Scan(&id, &subject, &todo)
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("id: %d, subject: %s, todo: %s", id, subject, todo)
	// This next lookup should error on Scan, since no row with that id exists.
	// We can also use this shorthand and chain QueryRow straight into Scan.
	err = db.QueryRow("SELECT * FROM todos WHERE id = ?", 100_000).Scan(&id, &subject, &todo)
	if err != nil {
		log.Print(err.Error())
	}
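	// Illustrative refinement (not in the original flow): "no matching row" usually is not a failure, so it is
	// worth distinguishing it from real errors. Row.Scan returns the sql.ErrNoRows sentinel in that case.
	if err == sql.ErrNoRows {
		log.Print("no todo with that id; treat it as 'not found' rather than a failure")
	}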
	// Note there is no Row.Close(); only the plural Rows type has a Close() method. The single row returned by
	// QueryRow is cleaned up for you once Scan has been called, so no explicit closing is required.
	// Prepared queries:
	// If you will be using the same query over and over, prepare it once and reuse the statement.
	// $1, $2, ... is the Postgres placeholder style, which SQLite also accepts; ? is MySQL's and SQLite's default.
	// Under the hood, db.Query() actually prepares, executes, and closes a prepared statement.
	// That's three round-trips to the database.
	stmt, err := db.Prepare("SELECT * FROM todos WHERE id = $1")
	if err != nil {
		log.Fatal(err)
	}
	// The prepared *sql.Stmt is created on a connection from the pool. database/sql remembers which connection
	// that was and, if it is busy when the statement is next used, transparently re-prepares the statement on
	// another connection. Call stmt.Close() when you are done so those resources are released.
	defer stmt.Close()
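	// Illustrative use of the prepared statement (assumes the row with id 1 queried earlier exists): the same
	// *sql.Stmt can be executed repeatedly without re-preparing the SQL each time.
	err = stmt.QueryRow(1).Scan(&id, &subject, &todo)
	if err != nil {
		log.Print(err.Error())
	} else {
		log.Printf("via prepared statement -> id: %d, subject: %s, todo: %s", id, subject, todo)
	}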
}
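
// countTodos is an illustrative sketch of the advice above about short-lived functions: they should receive the
// long-lived *sql.DB as an argument rather than opening and closing their own handle. It assumes the same todos
// table used in main and is not called anywhere; it only exists to show the shape of the pattern.
func countTodos(db *sql.DB) (int, error) {
	var n int
	// QueryRow + Scan is enough for a single aggregate value; there is no Rows object to close.
	err := db.QueryRow("SELECT COUNT(*) FROM todos").Scan(&n)
	return n, err
}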