func DefineCategoryTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(Category{}, "category")
	t.SetKeys(true, "id")
	t.ColMap("description").
		SetUnique(true).
		SetNotNull(true)
}
func DefineVirusdefTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(VirusDef{}, "virusdef")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetUnique(true).
		SetNotNull(true)
}
func getSequence(dbMap *gorp.DbMap) (uint64, error) {
	tx, err := dbMap.Begin()
	if err != nil {
		return 0, err
	}
	defer func() {
		if err != nil {
			tx.Rollback()
		} else {
			tx.Commit()
		}
	}()

	st, err := tx.Prepare("UPDATE seq SET id = LAST_INSERT_ID(id + 1)")
	if err != nil {
		return 0, err
	}
	defer st.Close()
	if _, err = st.Exec(); err != nil {
		return 0, err
	}

	stmt, err := tx.Prepare("SELECT LAST_INSERT_ID()")
	if err != nil {
		return 0, err
	}
	defer stmt.Close()

	var id uint64
	if err = stmt.QueryRow().Scan(&id); err != nil {
		return 0, err
	}
	return id, nil
}
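// Usage sketch for getSequence (an illustration, not part of the original
// source): it relies on MySQL's LAST_INSERT_ID(expr) trick, so it assumes the
// single-row "seq" counter table referenced in the UPDATE above already
// exists and has been seeded with one row.
func exampleNextID(dbMap *gorp.DbMap) {
	id, err := getSequence(dbMap)
	if err != nil {
		log.Fatalf("failed to allocate next sequence value: %v", err)
	}
	log.Printf("next id: %d", id)
}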
func (s SqlChannelStore) get(id string, master bool) StoreChannel {
	storeChannel := make(StoreChannel)

	go func() {
		result := StoreResult{}

		var db *gorp.DbMap
		if master {
			db = s.GetMaster()
		} else {
			db = s.GetReplica()
		}

		if obj, err := db.Get(model.Channel{}, id); err != nil {
			result.Err = model.NewLocAppError("SqlChannelStore.Get", "store.sql_channel.get.find.app_error", nil, "id="+id+", "+err.Error())
		} else if obj == nil {
			result.Err = model.NewLocAppError("SqlChannelStore.Get", "store.sql_channel.get.existing.app_error", nil, "id="+id)
		} else {
			result.Data = obj.(*model.Channel)
		}

		storeChannel <- result
		close(storeChannel)
	}()

	return storeChannel
}
// Query generates a Query for a target model. The target that is
// passed in must be a pointer to a struct, and will be used as a
// reference for query construction.
func Query(m *gorp.DbMap, exec gorp.SqlExecutor, target interface{}) interfaces.Query {
	// Handle non-standard dialects
	switch src := m.Dialect.(type) {
	case gorp.MySQLDialect:
		m.Dialect = dialects.MySQLDialect{src}
	case gorp.SqliteDialect:
		m.Dialect = dialects.SqliteDialect{src}
	default:
	}

	plan := &QueryPlan{
		dbMap:    m,
		executor: exec,
	}
	targetVal := reflect.ValueOf(target)
	if targetVal.Kind() != reflect.Ptr || targetVal.Elem().Kind() != reflect.Struct {
		plan.Errors = append(plan.Errors, errors.New("A query target must be a pointer to struct"))
	}
	targetTable, err := plan.mapTable(targetVal)
	if err != nil {
		plan.Errors = append(plan.Errors, err)
		return plan
	}
	plan.target = targetVal
	plan.table = targetTable
	return plan
}
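// Construction sketch for Query (illustrative only; the Invoice type is
// hypothetical, and whatever can be chained onto the returned
// interfaces.Query depends on that interface's definition, which is not shown
// here). A *gorp.DbMap also satisfies gorp.SqlExecutor, so it can serve as
// both arguments when no transaction is in use.
type Invoice struct {
	ID     int64  `db:"id"`
	Status string `db:"status"`
}

func exampleInvoiceQuery(dbMap *gorp.DbMap) interfaces.Query {
	return Query(dbMap, dbMap, &Invoice{})
}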
func DefineProfileTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(Profile{}, "profile")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetUnique(true).
		SetNotNull(true)
}
func DefineUtmstatusTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(UtmStatus{}, "utmstatus")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetUnique(true).
		SetNotNull(true)
}
func DefineServiceTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(Service{}, "service")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetUnique(true).
		SetNotNull(true)
}
func InitAllTables(dbm *gorp.DbMap) error {
	defineAllTables(dbm)
	if err := dbm.CreateTablesIfNotExists(); err != nil {
		return err
	}

	txn, err := dbm.Begin()
	if err != nil {
		return err
	}
	defer txn.Rollback()

	count, err := CountRows("loglevel", "id", txn)
	if err != nil {
		return err
	}
	if count == 0 {
		InitLoglevelTable(txn)
	}

	count, err = CountRows("utmstatus", "id", txn)
	if err != nil {
		return err
	}
	if count == 0 {
		InitUtmstatusTable(txn)
	}

	return txn.Commit()
}
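// Bootstrap sketch (illustrative): create all tables and seed the lookup
// tables, failing hard if initialization does not succeed.
func exampleBootstrap(dbm *gorp.DbMap) {
	if err := InitAllTables(dbm); err != nil {
		log.Fatalf("failed to initialize tables: %v", err)
	}
}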
func DefineHostTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(Host{}, "host")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetUnique(true).
		SetNotNull(true)
}
func (s SqlChannelStore) get(id string, master bool) StoreChannel {
	storeChannel := make(StoreChannel)

	go func() {
		result := StoreResult{}

		var db *gorp.DbMap
		if master {
			db = s.GetMaster()
		} else {
			db = s.GetReplica()
		}

		if obj, err := db.Get(model.Channel{}, id); err != nil {
			result.Err = model.NewAppError("SqlChannelStore.Get", "We encountered an error finding the channel", "id="+id+", "+err.Error())
		} else if obj == nil {
			result.Err = model.NewAppError("SqlChannelStore.Get", "We couldn't find the existing channel", "id="+id)
		} else {
			result.Data = obj.(*model.Channel)
		}

		storeChannel <- result
		close(storeChannel)
	}()

	return storeChannel
}
func NewConnection(cfg Config) (*gorp.DbMap, error) {
	if !strings.HasPrefix(cfg.DSN, "postgres://") {
		return nil, errors.New("unrecognized database driver")
	}

	db, err := sql.Open("postgres", cfg.DSN)
	if err != nil {
		return nil, err
	}
	db.SetMaxIdleConns(cfg.MaxIdleConnections)
	db.SetMaxOpenConns(cfg.MaxOpenConnections)

	dbm := gorp.DbMap{
		Db:      db,
		Dialect: gorp.PostgresDialect{},
	}

	for _, t := range tables {
		tm := dbm.AddTableWithName(t.model, t.name).SetKeys(t.autoinc, t.pkey...)
		for _, unique := range t.unique {
			cm := tm.ColMap(unique)
			if cm == nil {
				return nil, fmt.Errorf("no such column: %q", unique)
			}
			cm.SetUnique(true)
		}
	}

	return &dbm, nil
}
func SelectAllEquipmentInfo(dbm *gorp.DbMap) ([]EquipmentInfo, error) {
	var results []EquipmentInfo

	q := sq.Select(
		`"EquipmentInfo"."ID"`,
		`"EquipmentInfo"."CertifiedName"`,
		`"RadioAccessTechnology"."Description" AS EquipmentType`,
		`"EquipmentInfo"."Model"`,
		`"EquipmentInfo"."AuthNumber"`,
		`"EquipmentInfo"."RadioType"`,
		`"EquipmentInfo"."IsApplied1421"`,
		`"EquipmentInfo"."AuthDate"`,
		`"EquipmentInfo"."Note"`,
		`"EquipmentInfo"."File"`,
	).From(
		"EquipmentInfo",
	).LeftJoin(
		`"RadioAccessTechnology" ON "EquipmentInfo"."EquipmentType" = "RadioAccessTechnology"."EquipmentType"`,
	)

	sql, _, err := q.ToSql()
	if err != nil {
		return results, err
	}
	if _, err := dbm.Select(&results, sql); err != nil {
		return nil, err
	}
	return results, nil
}
func runQueryPlanSuite(t *testing.T, dialect gorp.Dialect, connection *sql.DB) {
	dbMap := new(gorp.DbMap)
	dbMap.Dialect = dialect
	dbMap.Db = connection
	testSuite := new(QueryPlanTestSuite)
	testSuite.Map = dbMap
	suite.Run(t, testSuite)
}
func DefineVirusTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(Virus{}, "virus")
	t.SetKeys(false, "id")
	SetNotNull(t, "status", "virusdef")
	t.ColMap("url").
		SetMaxSize(2048).
		SetNotNull(true)
}
func DefineLoglevelTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(LogLevel{}, "loglevel")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetMaxSize(15).
		SetUnique(true).
		SetNotNull(true)
}
func DefineUserTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(User{}, "user")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetMaxSize(45).
		SetUnique(true).
		SetNotNull(true)
}
// TODO: DRY the suite initialization code
func runQueryLanguageSuite(t *testing.T, dialect gorp.Dialect, connection *sql.DB) {
	dbMap := new(gorp.DbMap)
	dbMap.Dialect = dialect
	dbMap.Db = connection
	//dbMap.TraceOn("TEST DB: ", log.New(os.Stdout, "", log.LstdFlags))
	testSuite := new(QueryLanguageTestSuite)
	testSuite.Map = dbMap
	suite.Run(t, testSuite)
}
// LoadSQLDate decodes each row map into the target struct t and inserts it
// through the DbMap.
func LoadSQLDate(dbMap *gorp.DbMap, t interface{}, rows []map[string]interface{}) {
	for _, row := range rows {
		err := mapstructure.Decode(row, t)
		checkErr(err, "failed to decode row")
		err = dbMap.Insert(t)
		checkErr(err, "failed to insert row")
	}
}
func DefineDeviceTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(Device{}, "device")
	t.SetKeys(true, "id")
	t.ColMap("name").
		SetNotNull(true)
	t.ColMap("serial").
		SetMaxSize(16).
		SetUnique(true).
		SetNotNull(true)
}
func DefineLogTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(Log{}, "log")
	t.SetKeys(true, "id")
	SetNotNull(t, "file", "logtype", "device", "level", "user", "service", "date",
		"policy_id", "source_if", "dest_port", "dest_if", "sent_byte", "received_byte")
	t.ColMap("source_ip").SetMaxSize(45).SetNotNull(true)
	t.ColMap("dest_ip").SetMaxSize(45).SetNotNull(true)
	t.ColMap("message").SetMaxSize(255)
}
func DefineFileTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(File{}, "file")
	t.SetKeys(true, "id")
	t.ColMap("begin_dt").
		SetUnique(true).
		SetNotNull(true)
	t.ColMap("end_dt").
		SetNotNull(true)
	t.ColMap("count_lines").
		SetNotNull(true)
}
func DefineLogtypeTable(dbm *gorp.DbMap) {
	t := dbm.AddTableWithName(LogType{}, "logtype")
	t.SetKeys(true, "id")
	t.ColMap("level1").
		SetMaxSize(45).
		SetNotNull(true)
	t.ColMap("level2").
		SetMaxSize(45).
		SetNotNull(true)
	t.SetUniqueTogether("level1", "level2")
}
func process(metric data.Metric, dbMap *gorp.DbMap) error {
	log.Printf("Metric %+v", metric)
	now := time.Now().UTC()
	account := Account{Name: metric.Username, Time: now}
	err := dbMap.Insert(&account)
	// A duplicate account (unique violation) is expected and tolerated; any
	// other Postgres error is fatal.
	if err, ok := err.(*pq.Error); ok && err.Code.Name() != PG_UNIQUE_VIOLATION_ERR {
		log.Fatalf("Error inserting account. ERR: %+v", err)
	}
	return nil
}
func NewConnection(cfg Config) (*gorp.DbMap, error) {
	u, err := url.Parse(cfg.DSN)
	if err != nil {
		return nil, fmt.Errorf("parse DSN: %v", err)
	}

	var (
		db      *sql.DB
		dialect gorp.Dialect
	)
	switch u.Scheme {
	case "postgres":
		db, err = sql.Open("postgres", cfg.DSN)
		if err != nil {
			return nil, err
		}
		db.SetMaxIdleConns(cfg.MaxIdleConnections)
		db.SetMaxOpenConns(cfg.MaxOpenConnections)
		dialect = gorp.PostgresDialect{}
	case "sqlite3":
		db, err = sql.Open("sqlite3", u.Host)
		if err != nil {
			return nil, err
		}
		if u.Host == ":memory:" {
			// NOTE(ericchiang): sqlite3 coordinates concurrent clients through file locks.
			// In memory databases do not support concurrent calls. Limit the number of
			// open connections to 1.
			//
			// See: https://www.sqlite.org/faq.html#q5
			db.SetMaxOpenConns(1)
		}
		dialect = gorp.SqliteDialect{}
	default:
		return nil, errors.New("unrecognized database driver")
	}

	dbm := gorp.DbMap{Db: db, Dialect: dialect}
	for _, t := range tables {
		tm := dbm.AddTableWithName(t.model, t.name).SetKeys(t.autoinc, t.pkey...)
		for _, unique := range t.unique {
			cm := tm.ColMap(unique)
			if cm == nil {
				return nil, fmt.Errorf("no such column: %q", unique)
			}
			cm.SetUnique(true)
		}
	}
	return &dbm, nil
}
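// Usage sketch for NewConnection (the DSN is illustrative; the Config field
// names are the ones this function reads, and any other fields are not shown
// here).
func exampleConnect() (*gorp.DbMap, error) {
	return NewConnection(Config{
		DSN:                "postgres://user:pass@localhost:5432/app?sslmode=disable",
		MaxIdleConnections: 5,
		MaxOpenConnections: 10,
	})
}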
func (s SqlChannelStore) get(id string, master bool, allowFromCache bool) StoreChannel {
	storeChannel := make(StoreChannel, 1)

	go func() {
		result := StoreResult{}
		metrics := einterfaces.GetMetricsInterface()

		var db *gorp.DbMap
		if master {
			db = s.GetMaster()
		} else {
			db = s.GetReplica()
		}

		if allowFromCache {
			if cacheItem, ok := channelCache.Get(id); ok {
				if metrics != nil {
					metrics.IncrementMemCacheHitCounter("Channel")
				}
				result.Data = cacheItem.(*model.Channel)
				storeChannel <- result
				close(storeChannel)
				return
			} else {
				if metrics != nil {
					metrics.IncrementMemCacheMissCounter("Channel")
				}
			}
		} else {
			if metrics != nil {
				metrics.IncrementMemCacheMissCounter("Channel")
			}
		}

		if obj, err := db.Get(model.Channel{}, id); err != nil {
			result.Err = model.NewLocAppError("SqlChannelStore.Get", "store.sql_channel.get.find.app_error", nil, "id="+id+", "+err.Error())
		} else if obj == nil {
			result.Err = model.NewLocAppError("SqlChannelStore.Get", "store.sql_channel.get.existing.app_error", nil, "id="+id)
		} else {
			result.Data = obj.(*model.Channel)
			channelCache.AddWithExpiresInSecs(id, obj.(*model.Channel), CHANNEL_MEMBERS_COUNTS_CACHE_SEC)
		}

		storeChannel <- result
		close(storeChannel)
	}()

	return storeChannel
}
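// Caller-side sketch for the StoreChannel pattern used by get (illustrative;
// the wrapper function and logging are assumptions): the goroutine above
// sends exactly one StoreResult and then closes the channel, so a single
// receive is enough.
func exampleGetChannel(s SqlChannelStore, id string) *model.Channel {
	result := <-s.get(id, true, true)
	if result.Err != nil {
		log.Printf("failed to load channel %s: %v", id, result.Err)
		return nil
	}
	return result.Data.(*model.Channel)
}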
func IsImportedEquipmentInfo(file string, dbm *gorp.DbMap) (int64, error) {
	q := sq.Select(
		"count(*)",
	).From(
		"EquipmentInfo",
	).Where(
		sq.Eq{"File": file},
	)

	sql, args, err := q.ToSql()
	if err != nil {
		return -1, err
	}
	return dbm.SelectInt(sql, args...)
}
func setupConnection(con_type string, driver string, dataSource string, maxIdle int, maxOpen int, trace bool) *gorp.DbMap {
	charset := ""
	if strings.Index(dataSource, "?") > -1 {
		charset = "&charset=utf8mb4,utf8"
	} else {
		charset = "?charset=utf8mb4,utf8"
	}

	db, err := dbsql.Open(driver, dataSource+charset)
	if err != nil {
		l4g.Critical("Failed to open sql connection to '%v' err:%v", dataSource, err)
		time.Sleep(time.Second)
		panic("Failed to open sql connection " + err.Error())
	}

	l4g.Info("Pinging sql %v database at '%v'", con_type, dataSource)
	err = db.Ping()
	if err != nil {
		l4g.Critical("Failed to ping db err:%v", err)
		time.Sleep(time.Second)
		panic("Failed to ping sql connection " + err.Error())
	}

	db.SetMaxIdleConns(maxIdle)
	db.SetMaxOpenConns(maxOpen)

	var dbmap *gorp.DbMap
	if driver == "sqlite3" {
		dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.SqliteDialect{}}
	} else if driver == "mysql" {
		dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.MySQLDialect{Engine: "InnoDB", Encoding: "UTF8MB4"}}
	} else {
		l4g.Critical("Failed to create dialect specific driver")
		time.Sleep(time.Second)
		// err is nil on this path, so the panic message must not dereference it.
		panic("Failed to create dialect specific driver: unsupported driver " + driver)
	}

	if trace {
		dbmap.TraceOn("", sqltrace.New(os.Stdout, "sql-trace:", sqltrace.Lmicroseconds))
	}

	return dbmap
}
func importToDB(res *ScriptOutput, name string, dbm *gorp.DbMap) error {
	count, err := IsImportedEquipmentInfo(name, dbm)
	if err != nil {
		return err
	}
	if count > 0 {
		log.Printf("skipping import: %s", name)
		return nil
	}

	for _, row := range res.Result {
		// Skip the header row; its first cell holds the Japanese column label
		// "工事設計認証を受けた者の氏名又は名称" ("name of the party that received
		// construction design certification").
		if row[0] == "工事設計認証を受けた者の氏名又は名称" {
			continue
		}
		t, err := time.Parse("2006-01-02", row[6])
		if err != nil {
			continue
		}
		obj := EquipmentInfo{
			CertifiedName: row[0],
			EquipmentType: row[1],
			Model:         row[2],
			AuthNumber:    row[3],
			RadioType:     row[4],
			IsApplied1421: row[5],
			AuthDate:      t,
			Note:          row[7],
			File:          name,
		}
		log.Printf("insert: %v", obj)
		if err := dbm.Insert(&obj); err != nil {
			if !strings.HasPrefix(err.Error(), "UNIQUE constraint failed") {
				return err
			}
			log.Printf("skipping insert: %s", err.Error())
		}
	}
	return nil
}
func setupConnection(con_type string, driver string, dataSource string, maxIdle int, maxOpen int, trace bool) *gorp.DbMap {
	db, err := dbsql.Open(driver, dataSource)
	if err != nil {
		l4g.Critical(utils.T("store.sql.open_conn.critical"), err)
		time.Sleep(time.Second)
		os.Exit(EXIT_DB_OPEN)
	}

	l4g.Info(utils.T("store.sql.pinging.info"), con_type)
	err = db.Ping()
	if err != nil {
		l4g.Critical(utils.T("store.sql.ping.critical"), err)
		time.Sleep(time.Second)
		os.Exit(EXIT_PING)
	}

	db.SetMaxIdleConns(maxIdle)
	db.SetMaxOpenConns(maxOpen)
	db.SetConnMaxLifetime(time.Duration(MAX_DB_CONN_LIFETIME) * time.Minute)

	var dbmap *gorp.DbMap
	if driver == "sqlite3" {
		dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.SqliteDialect{}}
	} else if driver == model.DATABASE_DRIVER_MYSQL {
		dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.MySQLDialect{Engine: "InnoDB", Encoding: "UTF8MB4"}}
	} else if driver == model.DATABASE_DRIVER_POSTGRES {
		dbmap = &gorp.DbMap{Db: db, TypeConverter: mattermConverter{}, Dialect: gorp.PostgresDialect{}}
	} else {
		l4g.Critical(utils.T("store.sql.dialect_driver.critical"))
		time.Sleep(time.Second)
		os.Exit(EXIT_NO_DRIVER)
	}

	if trace {
		dbmap.TraceOn("", sqltrace.New(os.Stdout, "sql-trace:", sqltrace.Lmicroseconds))
	}

	return dbmap
}
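// Usage sketch for setupConnection (the data source string is illustrative
// only): open the master connection with a go-sql-driver/mysql DSN and SQL
// tracing disabled.
func exampleSetup() *gorp.DbMap {
	return setupConnection("master", model.DATABASE_DRIVER_MYSQL,
		"user:pass@tcp(localhost:3306)/mattermost?charset=utf8mb4,utf8", 10, 100, false)
}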