// WritePump pumps messages from the connection's Send channel out to the
// websocket peer. It also sends pings every PING_PERIOD to detect dead
// peers, and drops the connection if the client has not authenticated
// before AUTH_TIMEOUT fires. All writes to the socket happen on this
// goroutine, which serializes access to the websocket.
func (c *WebConn) WritePump() {
	ticker := time.NewTicker(PING_PERIOD)
	authTicker := time.NewTicker(AUTH_TIMEOUT)

	defer func() {
		ticker.Stop()
		authTicker.Stop()
		c.WebSocket.Close()
	}()

	for {
		select {
		case msg, ok := <-c.Send:
			if !ok {
				// Send channel was closed by the other side of the connection;
				// tell the peer we are done and stop the pump.
				c.WebSocket.SetWriteDeadline(time.Now().Add(WRITE_WAIT))
				c.WebSocket.WriteMessage(websocket.CloseMessage, []byte{})
				return
			}

			c.WebSocket.SetWriteDeadline(time.Now().Add(WRITE_WAIT))
			if err := c.WebSocket.WriteMessage(websocket.TextMessage, msg.GetPreComputeJson()); err != nil {
				// browsers will appear as CloseNoStatusReceived
				if websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseNoStatusReceived) {
					l4g.Debug(fmt.Sprintf("websocket.send: client side closed socket userId=%v", c.UserId))
				} else {
					l4g.Debug(fmt.Sprintf("websocket.send: closing websocket for userId=%v, error=%v", c.UserId, err.Error()))
				}
				return
			}

			if msg.EventType() == model.WEBSOCKET_EVENT_POSTED {
				if einterfaces.GetMetricsInterface() != nil {
					einterfaces.GetMetricsInterface().IncrementPostBroadcast()
				}
			}

		case <-ticker.C:
			// Periodic ping keeps the connection alive and surfaces dead peers
			// as write errors.
			c.WebSocket.SetWriteDeadline(time.Now().Add(WRITE_WAIT))
			if err := c.WebSocket.WriteMessage(websocket.PingMessage, []byte{}); err != nil {
				// browsers will appear as CloseNoStatusReceived
				if websocket.IsCloseError(err, websocket.CloseNormalClosure, websocket.CloseNoStatusReceived) {
					l4g.Debug(fmt.Sprintf("websocket.ticker: client side closed socket userId=%v", c.UserId))
				} else {
					l4g.Debug(fmt.Sprintf("websocket.ticker: closing websocket for userId=%v error=%v", c.UserId, err.Error()))
				}
				return
			}

		case <-authTicker.C:
			// Close connections that never authenticated within the window.
			if c.SessionToken == "" {
				l4g.Debug(fmt.Sprintf("websocket.authTicker: did not authenticate ip=%v", c.WebSocket.RemoteAddr()))
				return
			}
			// Authenticated in time; the auth check only needs to fire once.
			authTicker.Stop()
		}
	}
}
// saveConfig validates and persists a full server configuration posted by a
// system admin, reloads it so the new values take effect, and toggles the
// metrics server to match the new settings. Saving is refused while
// clustering is enabled, because config changes are not propagated to peers.
func saveConfig(c *Context, w http.ResponseWriter, r *http.Request) {
	cfg := model.ConfigFromJson(r.Body)
	if cfg == nil {
		c.SetInvalidParam("saveConfig", "config")
		return
	}

	cfg.SetDefaults()
	// Restore real values for fields the client received in sanitized form.
	utils.Desanitize(cfg)

	if err := cfg.IsValid(); err != nil {
		c.Err = err
		return
	}

	if err := utils.ValidateLdapFilter(cfg); err != nil {
		c.Err = err
		return
	}

	// Refuse to save while clustering is on; nodes would diverge.
	if *utils.Cfg.ClusterSettings.Enable {
		c.Err = model.NewLocAppError("saveConfig", "ent.cluster.save_config.error", nil, "")
		return
	}

	c.LogAudit("")

	//oldCfg := utils.Cfg
	// Persist first, then reload so utils.Cfg reflects the saved file.
	utils.SaveConfig(utils.CfgFileName, cfg)
	utils.LoadConfig(utils.CfgFileName)

	// Start or stop the metrics server to match the freshly loaded settings.
	if einterfaces.GetMetricsInterface() != nil {
		if *utils.Cfg.MetricsSettings.Enable {
			einterfaces.GetMetricsInterface().StartServer()
		} else {
			einterfaces.GetMetricsInterface().StopServer()
		}
	}

	// Future feature is to sync the configuration files
	// if einterfaces.GetClusterInterface() != nil {
	// 	err := einterfaces.GetClusterInterface().ConfigChanged(cfg, oldCfg, true)
	// 	if err != nil {
	// 		c.Err = err
	// 		return
	// 	}
	// }

	// start/restart email batching job if necessary
	InitEmailBatching()

	rdata := map[string]string{}
	rdata["status"] = "OK"
	w.Write([]byte(model.MapToJson(rdata)))
}
func GetSession(token string) *model.Session { metrics := einterfaces.GetMetricsInterface() var session *model.Session if ts, ok := sessionCache.Get(token); ok { session = ts.(*model.Session) if metrics != nil { metrics.IncrementMemCacheHitCounter("Session") } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Session") } } if session == nil { if sessionResult := <-Srv.Store.Session().Get(token); sessionResult.Err != nil { l4g.Error(utils.T("api.context.invalid_token.error"), token, sessionResult.Err.DetailedError) } else { session = sessionResult.Data.(*model.Session) if session.IsExpired() || session.Token != token { return nil } else { AddSessionToCache(session) return session } } } return session }
func (us SqlChannelStore) IsUserInChannelUseCache(userId string, channelId string) bool { metrics := einterfaces.GetMetricsInterface() if cacheItem, ok := allChannelMembersForUserCache.Get(userId); ok { if metrics != nil { metrics.IncrementMemCacheHitCounter("All Channel Members for User") } ids := cacheItem.(map[string]string) if _, ok := ids[channelId]; ok { return true } else { return false } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("All Channel Members for User") } } if result := <-us.GetAllChannelMembersForUser(userId, true); result.Err != nil { l4g.Error("SqlChannelStore.IsUserInChannelUseCache: " + result.Err.Error()) return false } else { ids := result.Data.(map[string]string) if _, ok := ids[channelId]; ok { return true } else { return false } } }
func (s SqlChannelStore) GetMemberCount(channelId string, allowFromCache bool) StoreChannel { storeChannel := make(StoreChannel, 1) metrics := einterfaces.GetMetricsInterface() go func() { result := StoreResult{} if allowFromCache { if cacheItem, ok := channelMemberCountsCache.Get(channelId); ok { if metrics != nil { metrics.IncrementMemCacheHitCounter("Channel Member Counts") } result.Data = cacheItem.(int64) storeChannel <- result close(storeChannel) return } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Channel Member Counts") } } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Channel Member Counts") } } count, err := s.GetReplica().SelectInt(` SELECT count(*) FROM ChannelMembers, Users WHERE ChannelMembers.UserId = Users.Id AND ChannelMembers.ChannelId = :ChannelId AND Users.DeleteAt = 0`, map[string]interface{}{"ChannelId": channelId}) if err != nil { result.Err = model.NewLocAppError("SqlChannelStore.GetMemberCount", "store.sql_channel.get_member_count.app_error", nil, "channel_id="+channelId+", "+err.Error()) } else { result.Data = count if allowFromCache { channelMemberCountsCache.AddWithExpiresInSecs(channelId, count, CHANNEL_MEMBERS_COUNTS_CACHE_SEC) } } storeChannel <- result close(storeChannel) }() return storeChannel }
func (s SqlChannelStore) GetAllChannelMembersForUser(userId string, allowFromCache bool) StoreChannel { storeChannel := make(StoreChannel, 1) go func() { result := StoreResult{} metrics := einterfaces.GetMetricsInterface() if allowFromCache { if cacheItem, ok := allChannelMembersForUserCache.Get(userId); ok { if metrics != nil { metrics.IncrementMemCacheHitCounter("All Channel Members for User") } result.Data = cacheItem.(map[string]string) storeChannel <- result close(storeChannel) return } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("All Channel Members for User") } } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("All Channel Members for User") } } var data []allChannelMember _, err := s.GetReplica().Select(&data, "SELECT ChannelId, Roles FROM Channels, ChannelMembers WHERE Channels.Id = ChannelMembers.ChannelId AND ChannelMembers.UserId = :UserId AND Channels.DeleteAt = 0", map[string]interface{}{"UserId": userId}) if err != nil { result.Err = model.NewLocAppError("SqlChannelStore.GetAllChannelMembersForUser", "store.sql_channel.get_channels.get.app_error", nil, "userId="+userId+", err="+err.Error()) } else { ids := make(map[string]string) for i := range data { ids[data[i].ChannelId] = data[i].Roles } result.Data = ids if allowFromCache { allChannelMembersForUserCache.AddWithExpiresInSecs(userId, ids, ALL_CHANNEL_MEMBERS_FOR_USER_CACHE_SEC) } } storeChannel <- result close(storeChannel) }() return storeChannel }
func (s SqlChannelStore) get(id string, master bool, allowFromCache bool) StoreChannel { storeChannel := make(StoreChannel, 1) go func() { result := StoreResult{} metrics := einterfaces.GetMetricsInterface() var db *gorp.DbMap if master { db = s.GetMaster() } else { db = s.GetReplica() } if allowFromCache { if cacheItem, ok := channelCache.Get(id); ok { if metrics != nil { metrics.IncrementMemCacheHitCounter("Channel") } result.Data = cacheItem.(*model.Channel) storeChannel <- result close(storeChannel) return } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Channel") } } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Channel") } } if obj, err := db.Get(model.Channel{}, id); err != nil { result.Err = model.NewLocAppError("SqlChannelStore.Get", "store.sql_channel.get.find.app_error", nil, "id="+id+", "+err.Error()) } else if obj == nil { result.Err = model.NewLocAppError("SqlChannelStore.Get", "store.sql_channel.get.existing.app_error", nil, "id="+id) } else { result.Data = obj.(*model.Channel) channelCache.AddWithExpiresInSecs(id, obj.(*model.Channel), CHANNEL_MEMBERS_COUNTS_CACHE_SEC) } storeChannel <- result close(storeChannel) }() return storeChannel }
func (s SqlPostStore) GetEtag(channelId string, allowFromCache bool) StoreChannel { storeChannel := make(StoreChannel, 1) go func() { result := StoreResult{} metrics := einterfaces.GetMetricsInterface() if allowFromCache { if cacheItem, ok := lastPostTimeCache.Get(channelId); ok { if metrics != nil { metrics.IncrementMemCacheHitCounter("Last Post Time") } result.Data = fmt.Sprintf("%v.%v", model.CurrentVersion, cacheItem.(int64)) storeChannel <- result close(storeChannel) return } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Last Post Time") } } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Last Post Time") } } var et etagPosts err := s.GetReplica().SelectOne(&et, "SELECT Id, UpdateAt FROM Posts WHERE ChannelId = :ChannelId ORDER BY UpdateAt DESC LIMIT 1", map[string]interface{}{"ChannelId": channelId}) if err != nil { result.Data = fmt.Sprintf("%v.%v", model.CurrentVersion, model.GetMillis()) } else { result.Data = fmt.Sprintf("%v.%v", model.CurrentVersion, et.UpdateAt) } lastPostTimeCache.AddWithExpiresInSecs(channelId, et.UpdateAt, LAST_POST_TIME_CACHE_SEC) storeChannel <- result close(storeChannel) }() return storeChannel }
func HandleEtag(etag string, routeName string, w http.ResponseWriter, r *http.Request) bool { metrics := einterfaces.GetMetricsInterface() if et := r.Header.Get(model.HEADER_ETAG_CLIENT); len(etag) > 0 { if et == etag { w.Header().Set(model.HEADER_ETAG_SERVER, etag) w.WriteHeader(http.StatusNotModified) if metrics != nil { metrics.IncrementEtagHitCounter(routeName) } return true } } if metrics != nil { metrics.IncrementEtagMissCounter(routeName) } return false }
func (s SqlChannelStore) GetMemberCountFromCache(channelId string) int64 { metrics := einterfaces.GetMetricsInterface() if cacheItem, ok := channelMemberCountsCache.Get(channelId); ok { if metrics != nil { metrics.IncrementMemCacheHitCounter("Channel Member Counts") } return cacheItem.(int64) } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Channel Member Counts") } } if result := <-s.GetMemberCount(channelId, true); result.Err != nil { return 0 } else { return result.Data.(int64) } }
func GetStatusesByIds(userIds []string) (map[string]interface{}, *model.AppError) { statusMap := map[string]interface{}{} metrics := einterfaces.GetMetricsInterface() missingUserIds := []string{} for _, userId := range userIds { if result, ok := statusCache.Get(userId); ok { statusMap[userId] = result.(*model.Status).Status if metrics != nil { metrics.IncrementMemCacheHitCounter("Status") } } else { missingUserIds = append(missingUserIds, userId) if metrics != nil { metrics.IncrementMemCacheMissCounter("Status") } } } if len(missingUserIds) > 0 { if result := <-Srv.Store.Status().GetByIds(missingUserIds); result.Err != nil { return nil, result.Err } else { statuses := result.Data.([]*model.Status) for _, s := range statuses { AddStatusCache(s) statusMap[s.UserId] = s.Status } } } // For the case where the user does not have a row in the Status table and cache for _, userId := range missingUserIds { if _, ok := statusMap[userId]; !ok { statusMap[userId] = model.STATUS_OFFLINE } } return statusMap, nil }
func GetSession(token string) (*model.Session, *model.AppError) { metrics := einterfaces.GetMetricsInterface() var session *model.Session if ts, ok := sessionCache.Get(token); ok { session = ts.(*model.Session) if metrics != nil { metrics.IncrementMemCacheHitCounter("Session") } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Session") } } if session == nil { if sessionResult := <-Srv.Store.Session().Get(token); sessionResult.Err != nil { return nil, model.NewLocAppError("GetSession", "api.context.invalid_token.error", map[string]interface{}{"Token": token, "Error": sessionResult.Err.DetailedError}, "") } else { session = sessionResult.Data.(*model.Session) if session.IsExpired() || session.Token != token { return nil, model.NewLocAppError("GetSession", "api.context.invalid_token.error", map[string]interface{}{"Token": token, "Error": sessionResult.Err.DetailedError}, "") } else { AddSessionToCache(session) return session, nil } } } if session == nil || session.IsExpired() { return nil, model.NewLocAppError("GetSession", "api.context.invalid_token.error", map[string]interface{}{"Token": token}, "") } return session, nil }
// runServer boots the Mattermost server: it loads and validates the
// configuration, initializes stores, API routes, and web handlers, applies
// license restrictions, starts optional enterprise services (compliance,
// inter-node communication, metrics), then blocks until SIGINT/SIGTERM and
// shuts the services down in reverse order.
func runServer(configFileLocation string) {
	if errstr := doLoadConfig(configFileLocation); errstr != "" {
		l4g.Exit("Unable to load mattermost configuration file: ", errstr)
		return
	}

	utils.InitTranslations(utils.Cfg.LocalizationSettings)
	utils.TestConnection(utils.Cfg)

	pwd, _ := os.Getwd()
	l4g.Info(utils.T("mattermost.current_version"), model.CurrentVersion, model.BuildNumber, model.BuildDate, model.BuildHash, model.BuildHashEnterprise)
	l4g.Info(utils.T("mattermost.entreprise_enabled"), model.BuildEnterpriseReady)
	l4g.Info(utils.T("mattermost.working_dir"), pwd)
	l4g.Info(utils.T("mattermost.config_file"), utils.FindConfigFile(configFileLocation))

	// Enable developer settings if this is a "dev" build
	if model.BuildNumber == "dev" {
		*utils.Cfg.ServiceSettings.EnableDeveloper = true
	}

	cmdUpdateDb30()

	app.NewServer()
	app.InitStores()
	api.InitRouter()
	api.InitApi()
	web.InitWeb()

	if model.BuildEnterpriseReady == "true" {
		api.LoadLicense()
	}

	// Read replicas are a licensed feature; clamp to a single data source
	// when no license is present.
	if !utils.IsLicensed && len(utils.Cfg.SqlSettings.DataSourceReplicas) > 1 {
		l4g.Warn(utils.T("store.sql.read_replicas_not_licensed.critical"))
		utils.Cfg.SqlSettings.DataSourceReplicas = utils.Cfg.SqlSettings.DataSourceReplicas[:1]
	}

	// Unlicensed servers also get the default per-channel notification cap.
	if !utils.IsLicensed {
		utils.Cfg.TeamSettings.MaxNotificationsPerChannel = &MaxNotificationsPerChannelDefault
	}

	resetStatuses()

	app.StartServer()

	// If we allow testing then listen for manual testing URL hits
	if utils.Cfg.ServiceSettings.EnableTesting {
		manualtesting.InitManualTesting()
	}

	setDiagnosticId()
	go runSecurityAndDiagnosticsJob()

	if complianceI := einterfaces.GetComplianceInterface(); complianceI != nil {
		complianceI.StartComplianceDailyJob()
	}

	if einterfaces.GetClusterInterface() != nil {
		einterfaces.GetClusterInterface().StartInterNodeCommunication()
	}

	if einterfaces.GetMetricsInterface() != nil {
		einterfaces.GetMetricsInterface().StartServer()
	}

	// wait for kill signal before attempting to gracefully shutdown
	// the running service
	c := make(chan os.Signal)
	signal.Notify(c, os.Interrupt, syscall.SIGINT, syscall.SIGTERM)
	<-c

	if einterfaces.GetClusterInterface() != nil {
		einterfaces.GetClusterInterface().StopInterNodeCommunication()
	}

	if einterfaces.GetMetricsInterface() != nil {
		einterfaces.GetMetricsInterface().StopServer()
	}

	app.StopServer()
}
// GetPostsSince asynchronously returns the posts in a channel updated after
// the given time (plus the root posts of any updated replies, for thread
// context), ordered newest-first. When allowFromCache is true and the
// channel's cached last-post time is at or before the cutoff, an empty list
// is returned without querying. A successful query refreshes that cache with
// the newest UpdateAt seen.
func (s SqlPostStore) GetPostsSince(channelId string, time int64, allowFromCache bool) StoreChannel {
	storeChannel := make(StoreChannel, 1)

	go func() {
		result := StoreResult{}
		metrics := einterfaces.GetMetricsInterface()

		if allowFromCache {
			// If the last post in the channel's time is less than or equal to the time we are getting posts since,
			// we can safely return no posts.
			if cacheItem, ok := lastPostTimeCache.Get(channelId); ok && cacheItem.(int64) <= time {
				if metrics != nil {
					metrics.IncrementMemCacheHitCounter("Last Post Time")
				}
				list := &model.PostList{Order: make([]string, 0, 0)}
				result.Data = list
				storeChannel <- result
				close(storeChannel)
				return
			} else {
				if metrics != nil {
					metrics.IncrementMemCacheMissCounter("Last Post Time")
				}
			}
		} else {
			if metrics != nil {
				metrics.IncrementMemCacheMissCounter("Last Post Time")
			}
		}

		// First branch of the UNION: posts updated after the cutoff. Second
		// branch: the root posts of those updated posts (for thread context).
		var posts []*model.Post
		_, err := s.GetReplica().Select(&posts, `(SELECT * FROM Posts WHERE (UpdateAt > :Time AND ChannelId = :ChannelId) LIMIT 1000) UNION (SELECT * FROM Posts WHERE Id IN (SELECT * FROM (SELECT RootId FROM Posts WHERE UpdateAt > :Time AND ChannelId = :ChannelId LIMIT 1000) temp_tab)) ORDER BY CreateAt DESC`, map[string]interface{}{"ChannelId": channelId, "Time": time})

		if err != nil {
			result.Err = model.NewLocAppError("SqlPostStore.GetPostsSince", "store.sql_post.get_posts_since.app_error", nil, "channelId="+channelId+err.Error())
		} else {
			list := &model.PostList{Order: make([]string, 0, len(posts))}

			var latestUpdate int64 = 0

			for _, p := range posts {
				list.AddPost(p)
				// Only directly-updated posts are ordered; root posts pulled in
				// by the UNION are present in Posts but not in Order.
				if p.UpdateAt > time {
					list.AddOrder(p.Id)
				}
				if latestUpdate < p.UpdateAt {
					latestUpdate = p.UpdateAt
				}
			}

			// Remember the newest update time so future calls can short-circuit.
			lastPostTimeCache.AddWithExpiresInSecs(channelId, latestUpdate, LAST_POST_TIME_CACHE_SEC)

			result.Data = list
		}

		storeChannel <- result
		close(storeChannel)
	}()

	return storeChannel
}
// GetPosts asynchronously returns a page of posts for a channel: the root
// posts in the requested window plus their parent/thread posts. limit is
// capped at 1000. The last-posts cache is only consulted for the default
// first page (offset == 0, limit == 60), and only that page is written back.
func (s SqlPostStore) GetPosts(channelId string, offset int, limit int, allowFromCache bool) StoreChannel {
	storeChannel := make(StoreChannel, 1)

	go func() {
		result := StoreResult{}
		metrics := einterfaces.GetMetricsInterface()

		if limit > 1000 {
			result.Err = model.NewLocAppError("SqlPostStore.GetLinearPosts", "store.sql_post.get_posts.app_error", nil, "channelId="+channelId)
			storeChannel <- result
			close(storeChannel)
			return
		}

		if allowFromCache && offset == 0 && limit == 60 {
			if cacheItem, ok := lastPostsCache.Get(channelId); ok {
				if metrics != nil {
					metrics.IncrementMemCacheHitCounter("Last Posts Cache")
				}
				result.Data = cacheItem.(*model.PostList)
				storeChannel <- result
				close(storeChannel)
				return
			} else {
				if metrics != nil {
					metrics.IncrementMemCacheMissCounter("Last Posts Cache")
				}
			}
		} else {
			if metrics != nil {
				metrics.IncrementMemCacheMissCounter("Last Posts Cache")
			}
		}

		// Fetch the page of root posts and their parent posts concurrently.
		rpc := s.getRootPosts(channelId, offset, limit)
		cpc := s.getParentsPosts(channelId, offset, limit)

		if rpr := <-rpc; rpr.Err != nil {
			result.Err = rpr.Err
		} else if cpr := <-cpc; cpr.Err != nil {
			result.Err = cpr.Err
		} else {
			posts := rpr.Data.([]*model.Post)
			parents := cpr.Data.([]*model.Post)

			list := &model.PostList{Order: make([]string, 0, len(posts))}

			// Root posts define the page order; parents are included for
			// thread context but are not part of Order.
			for _, p := range posts {
				list.AddPost(p)
				list.AddOrder(p.Id)
			}

			for _, p := range parents {
				list.AddPost(p)
			}

			list.MakeNonNil()

			if offset == 0 && limit == 60 {
				lastPostsCache.AddWithExpiresInSecs(channelId, list, LAST_POSTS_CACHE_SEC)
			}

			result.Data = list
		}

		storeChannel <- result
		close(storeChannel)
	}()

	return storeChannel
}
// GetProfileByIds asynchronously resolves a set of user ids into a map of
// userId -> sanitized *model.User. When allowFromCache is true, profiles
// are served from the by-id cache where possible and only the remainder is
// queried; freshly loaded profiles are always written back to the cache.
// Passwords are blanked and AuthData emptied before the map is returned.
func (us SqlUserStore) GetProfileByIds(userIds []string, allowFromCache bool) StoreChannel {
	storeChannel := make(StoreChannel, 1)

	go func() {
		result := StoreResult{}
		metrics := einterfaces.GetMetricsInterface()

		var users []*model.User
		userMap := make(map[string]*model.User)
		props := make(map[string]interface{})
		idQuery := ""
		remainingUserIds := make([]string, 0)

		if allowFromCache {
			for _, userId := range userIds {
				if cacheItem, ok := profileByIdsCache.Get(userId); ok {
					u := cacheItem.(*model.User)
					userMap[u.Id] = u
				} else {
					remainingUserIds = append(remainingUserIds, userId)
				}
			}
			if metrics != nil {
				metrics.AddMemCacheHitCounter("Profile By Ids", float64(len(userMap)))
				metrics.AddMemCacheMissCounter("Profile By Ids", float64(len(remainingUserIds)))
			}
		} else {
			remainingUserIds = userIds
			if metrics != nil {
				metrics.AddMemCacheMissCounter("Profile By Ids", float64(len(remainingUserIds)))
			}
		}

		// If everything came from the cache then just return
		if len(remainingUserIds) == 0 {
			result.Data = userMap
			storeChannel <- result
			close(storeChannel)
			return
		}

		// Build a parameterized IN (...) clause: :userId0, :userId1, ...
		for index, userId := range remainingUserIds {
			if len(idQuery) > 0 {
				idQuery += ", "
			}

			props["userId"+strconv.Itoa(index)] = userId
			idQuery += ":userId" + strconv.Itoa(index)
		}

		if _, err := us.GetReplica().Select(&users, "SELECT * FROM Users WHERE Users.Id IN ("+idQuery+")", props); err != nil {
			result.Err = model.NewLocAppError("SqlUserStore.GetProfileByIds", "store.sql_user.get_profiles.app_error", nil, err.Error())
		} else {
			for _, u := range users {
				// Sanitize credentials before handing profiles to callers.
				u.Password = ""
				u.AuthData = new(string)
				*u.AuthData = ""
				userMap[u.Id] = u
				profileByIdsCache.AddWithExpiresInSecs(u.Id, u, PROFILE_BY_IDS_CACHE_SEC)
			}

			result.Data = userMap
		}

		storeChannel <- result
		close(storeChannel)
	}()

	return storeChannel
}
// sendPushNotification builds and delivers a mobile push notification about
// a post to every mobile session the recipient has. The message text depends
// on the channel type, whether the recipient was mentioned, and the server's
// push-content setting (full post contents vs. generic "X posted" text).
func sendPushNotification(post *model.Post, user *model.User, channel *model.Channel, senderName string, wasMentioned bool) *model.AppError {
	sessions, err := getMobileAppSessions(user.Id)
	if err != nil {
		return err
	}

	var channelName string

	if channel.Type == model.CHANNEL_DIRECT {
		channelName = senderName
	} else {
		channelName = channel.DisplayName
	}

	userLocale := utils.GetUserTranslations(user.Locale)

	msg := model.PushNotification{}
	// Badge count comes from the unread count; fall back to 1 on store error.
	if badge := <-Srv.Store.User().GetUnreadCount(user.Id); badge.Err != nil {
		msg.Badge = 1
		l4g.Error(utils.T("store.sql_user.get_unread_count.app_error"), user.Id, badge.Err)
	} else {
		msg.Badge = int(badge.Data.(int64))
	}
	msg.Type = model.PUSH_TYPE_MESSAGE
	msg.TeamId = channel.TeamId
	msg.ChannelId = channel.Id
	msg.ChannelName = channel.Name

	if *utils.Cfg.EmailSettings.PushNotificationContents == model.FULL_NOTIFICATION {
		// Full contents: include the post text with mention markup stripped.
		if channel.Type == model.CHANNEL_DIRECT {
			msg.Category = model.CATEGORY_DM
			msg.Message = "@" + senderName + ": " + model.ClearMentionTags(post.Message)
		} else {
			msg.Message = senderName + userLocale("api.post.send_notifications_and_forget.push_in") + channelName + ": " + model.ClearMentionTags(post.Message)
		}
	} else {
		// Generic contents: only say who posted and where.
		if channel.Type == model.CHANNEL_DIRECT {
			msg.Category = model.CATEGORY_DM
			msg.Message = senderName + userLocale("api.post.send_notifications_and_forget.push_message")
		} else if wasMentioned {
			msg.Message = senderName + userLocale("api.post.send_notifications_and_forget.push_mention") + channelName
		} else {
			msg.Message = senderName + userLocale("api.post.send_notifications_and_forget.push_non_mention") + channelName
		}
	}

	l4g.Debug(utils.T("api.post.send_notifications_and_forget.push_notification.debug"), msg.DeviceId, msg.Message)

	// Send a per-session copy so each can carry its own device id/platform.
	for _, session := range sessions {
		tmpMessage := *model.PushNotificationFromJson(strings.NewReader(msg.ToJson()))
		tmpMessage.SetDeviceIdAndPlatform(session.DeviceId)
		if err := sendToPushProxy(tmpMessage); err != nil {
			return err
		}
		if einterfaces.GetMetricsInterface() != nil {
			einterfaces.GetMetricsInterface().IncrementPostSentPush()
		}
	}

	return nil
}
// ServeHTTP is the shared entry point for every API and page handler. It
// builds the request Context (translations, request id, IP, team id),
// extracts the session token from the Authorization header, session cookie,
// or query string (in that order), applies security/identification headers,
// authenticates and authorizes according to the handler's flags, invokes the
// wrapped handler, and finally renders any accumulated error and records
// HTTP metrics.
func (h handler) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	now := time.Now()
	l4g.Debug("%v", r.URL.Path)

	c := &Context{}
	c.T, c.Locale = utils.GetTranslationsAndLocale(w, r)
	c.RequestId = model.NewId()
	c.IpAddress = GetIpAddress(r)
	c.TeamId = mux.Vars(r)["team_id"]

	token := ""
	isTokenFromQueryString := false

	// Attempt to parse token out of the header
	authHeader := r.Header.Get(model.HEADER_AUTH)
	if len(authHeader) > 6 && strings.ToUpper(authHeader[0:6]) == model.HEADER_BEARER {
		// Default session token
		token = authHeader[7:]
	} else if len(authHeader) > 5 && strings.ToLower(authHeader[0:5]) == model.HEADER_TOKEN {
		// OAuth token
		token = authHeader[6:]
	}

	// Attempt to parse the token from the cookie
	if len(token) == 0 {
		if cookie, err := r.Cookie(model.SESSION_COOKIE_TOKEN); err == nil {
			token = cookie.Value

			// Cookie-authenticated requests to protected handlers must also
			// carry the X-Requested-With header (unless the handler trusts the
			// requester); otherwise treat the request as a CSRF attempt.
			if (h.requireSystemAdmin || h.requireUser) && !h.trustRequester {
				if r.Header.Get(model.HEADER_REQUESTED_WITH) != model.HEADER_REQUESTED_WITH_XML {
					// NOTE(review): "bea" is a typo for "be a" in this detail
					// string; left unchanged here since it is runtime text.
					c.Err = model.NewLocAppError("ServeHTTP", "api.context.session_expired.app_error", nil, "token="+token+" Appears to bea CSRF attempt")
					token = ""
				}
			}
		}
	}

	// Attempt to parse token out of the query string
	if len(token) == 0 {
		token = r.URL.Query().Get("access_token")
		isTokenFromQueryString = true
	}

	if *utils.Cfg.ServiceSettings.SiteURL != "" {
		c.SetSiteURL(*utils.Cfg.ServiceSettings.SiteURL)
	} else {
		// No configured site URL: derive it from the incoming request.
		protocol := GetProtocol(r)
		c.SetSiteURL(protocol + "://" + r.Host)
	}

	w.Header().Set(model.HEADER_REQUEST_ID, c.RequestId)
	w.Header().Set(model.HEADER_VERSION_ID, fmt.Sprintf("%v.%v.%v", model.CurrentVersion, model.BuildNumber, utils.CfgHash))
	if einterfaces.GetClusterInterface() != nil {
		w.Header().Set(model.HEADER_CLUSTER_ID, einterfaces.GetClusterInterface().GetClusterId())
	}

	// Instruct the browser not to display us in an iframe unless is the same origin for anti-clickjacking
	if !h.isApi {
		w.Header().Set("X-Frame-Options", "SAMEORIGIN")
		w.Header().Set("Content-Security-Policy", "frame-ancestors 'self'")
	} else {
		// All api response bodies will be JSON formatted by default
		w.Header().Set("Content-Type", "application/json")

		if r.Method == "GET" {
			w.Header().Set("Expires", "0")
		}
	}

	if len(token) != 0 {
		session := GetSession(token)

		if session == nil || session.IsExpired() {
			c.RemoveSessionCookie(w, r)
			if h.requireUser || h.requireSystemAdmin {
				c.Err = model.NewLocAppError("ServeHTTP", "api.context.session_expired.app_error", nil, "token="+token)
				c.Err.StatusCode = http.StatusUnauthorized
			}
		} else if !session.IsOAuth && isTokenFromQueryString {
			// Query-string tokens are only accepted for OAuth sessions.
			c.Err = model.NewLocAppError("ServeHTTP", "api.context.token_provided.app_error", nil, "token="+token)
			c.Err.StatusCode = http.StatusUnauthorized
		} else {
			c.Session = *session
		}
	}

	// Derive the team URL and in-team path from the request URL.
	if h.isApi || h.isTeamIndependent {
		c.setTeamURL(c.GetSiteURL(), false)
		c.Path = r.URL.Path
	} else {
		splitURL := strings.Split(r.URL.Path, "/")
		c.setTeamURL(c.GetSiteURL()+"/"+splitURL[1], true)
		c.Path = "/" + strings.Join(splitURL[2:], "/")
	}

	if c.Err == nil && h.requireUser {
		c.UserRequired()
	}

	if c.Err == nil && h.requireSystemAdmin {
		c.SystemAdminRequired()
	}

	if c.Err == nil && h.isUserActivity && token != "" && len(c.Session.UserId) > 0 {
		SetStatusOnline(c.Session.UserId, c.Session.Id, false)
	}

	// Only invoke the wrapped handler when auth/authz succeeded.
	if c.Err == nil {
		h.handleFunc(c, w, r)
	}

	// Handle errors that have occurred
	if c.Err != nil {
		c.Err.Translate(c.T)
		c.Err.RequestId = c.RequestId
		c.LogError(c.Err)
		c.Err.Where = r.URL.Path

		// Block out detailed error when not in developer mode
		if !*utils.Cfg.ServiceSettings.EnableDeveloper {
			c.Err.DetailedError = ""
		}

		if h.isApi {
			w.WriteHeader(c.Err.StatusCode)
			w.Write([]byte(c.Err.ToJson()))

			if einterfaces.GetMetricsInterface() != nil {
				einterfaces.GetMetricsInterface().IncrementHttpError()
			}
		} else {
			if c.Err.StatusCode == http.StatusUnauthorized {
				http.Redirect(w, r, c.GetTeamURL()+"/?redirect="+url.QueryEscape(r.URL.Path), http.StatusTemporaryRedirect)
			} else {
				RenderWebError(c.Err, w, r)
			}
		}
	}

	if h.isApi && einterfaces.GetMetricsInterface() != nil {
		einterfaces.GetMetricsInterface().IncrementHttpRequest()

		// Websocket upgrades are long-lived; excluding them keeps the request
		// duration histogram meaningful.
		if r.URL.Path != model.API_URL_SUFFIX+"/users/websocket" {
			elapsed := float64(time.Since(now)) / float64(time.Second)
			einterfaces.GetMetricsInterface().ObserveHttpRequestDuration(elapsed)
		}
	}
}
// CreatePost validates a post's root/parent relationship, saves it, attaches
// any uploaded files, invalidates the channel's caches, and dispatches the
// post events (notifications and, when triggerWebhooks is true, outgoing
// webhooks). It returns the post as stored.
func CreatePost(post *model.Post, teamId string, triggerWebhooks bool) (*model.Post, *model.AppError) {
	var pchan store.StoreChannel
	if len(post.RootId) > 0 {
		// Kick off the root-thread fetch early; awaited below for validation.
		pchan = Srv.Store.Post().Get(post.RootId)
	}

	// Verify the parent/child relationships are correct
	if pchan != nil {
		if presult := <-pchan; presult.Err != nil {
			return nil, model.NewLocAppError("createPost", "api.post.create_post.root_id.app_error", nil, "")
		} else {
			list := presult.Data.(*model.PostList)
			// The root must exist and live in the same channel as the new post.
			if len(list.Posts) == 0 || !list.IsChannelId(post.ChannelId) {
				return nil, model.NewLocAppError("createPost", "api.post.create_post.channel_root_id.app_error", nil, "")
			}

			if post.ParentId == "" {
				post.ParentId = post.RootId
			}

			// When replying to a reply, the parent must be part of the root's thread.
			if post.RootId != post.ParentId {
				parent := list.Posts[post.ParentId]
				if parent == nil {
					return nil, model.NewLocAppError("createPost", "api.post.create_post.parent_id.app_error", nil, "")
				}
			}
		}
	}

	post.Hashtags, _ = model.ParseHashtags(post.Message)

	var rpost *model.Post
	if result := <-Srv.Store.Post().Save(post); result.Err != nil {
		return nil, result.Err
	} else {
		rpost = result.Data.(*model.Post)
	}

	if einterfaces.GetMetricsInterface() != nil {
		einterfaces.GetMetricsInterface().IncrementPostCreate()
	}

	if len(post.FileIds) > 0 {
		// There's a rare bug where the client sends up duplicate FileIds so protect against that
		post.FileIds = utils.RemoveDuplicatesFromStringArray(post.FileIds)

		for _, fileId := range post.FileIds {
			if result := <-Srv.Store.FileInfo().AttachToPost(fileId, post.Id); result.Err != nil {
				// Attachment failures are logged but do not fail the post.
				l4g.Error(utils.T("api.post.create_post.attach_files.error"), post.Id, post.FileIds, post.UserId, result.Err)
			}
		}

		if einterfaces.GetMetricsInterface() != nil {
			einterfaces.GetMetricsInterface().IncrementPostFileAttachment(len(post.FileIds))
		}
	}

	InvalidateCacheForChannel(rpost.ChannelId)
	InvalidateCacheForChannelPosts(rpost.ChannelId)

	if err := handlePostEvents(rpost, teamId, triggerWebhooks); err != nil {
		return nil, err
	}

	return rpost, nil
}
func (us SqlUserStore) GetProfilesInChannel(channelId string, offset int, limit int, allowFromCache bool) StoreChannel { storeChannel := make(StoreChannel) go func() { result := StoreResult{} metrics := einterfaces.GetMetricsInterface() if allowFromCache && offset == -1 && limit == -1 { if cacheItem, ok := profilesInChannelCache.Get(channelId); ok { if metrics != nil { metrics.IncrementMemCacheHitCounter("Profiles in Channel") } result.Data = cacheItem.(map[string]*model.User) storeChannel <- result close(storeChannel) return } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Profiles in Channel") } } } else { if metrics != nil { metrics.IncrementMemCacheMissCounter("Profiles in Channel") } } var users []*model.User query := "SELECT Users.* FROM Users, ChannelMembers WHERE ChannelMembers.ChannelId = :ChannelId AND Users.Id = ChannelMembers.UserId AND Users.DeleteAt = 0" if limit >= 0 && offset >= 0 { query += " ORDER BY Users.Username ASC LIMIT :Limit OFFSET :Offset" } if _, err := us.GetReplica().Select(&users, query, map[string]interface{}{"ChannelId": channelId, "Offset": offset, "Limit": limit}); err != nil { result.Err = model.NewLocAppError("SqlUserStore.GetProfilesInChannel", "store.sql_user.get_profiles.app_error", nil, err.Error()) } else { userMap := make(map[string]*model.User) for _, u := range users { u.Password = "" u.AuthData = new(string) *u.AuthData = "" userMap[u.Id] = u } result.Data = userMap if allowFromCache && offset == -1 && limit == -1 { profilesInChannelCache.AddWithExpiresInSecs(channelId, userMap, PROFILES_IN_CHANNEL_CACHE_SEC) } } storeChannel <- result close(storeChannel) }() return storeChannel }
// sendNotificationEmail emails the recipient about a new post, either
// immediately or via the email-batching job when the user has batching
// enabled. For cross-team DMs it picks a team the recipient actually belongs
// to (falling back to the select_team page) so the link in the email works.
func sendNotificationEmail(post *model.Post, user *model.User, channel *model.Channel, team *model.Team, senderName string, sender *model.User) *model.AppError {
	if channel.Type == model.CHANNEL_DIRECT && channel.TeamId != team.Id {
		// this message is a cross-team DM so it we need to find a team that the recipient is on to use in the link
		if result := <-Srv.Store.Team().GetTeamsByUserId(user.Id); result.Err != nil {
			return result.Err
		} else {
			// if the recipient isn't in the current user's team, just pick one
			teams := result.Data.([]*model.Team)
			found := false

			for i := range teams {
				if teams[i].Id == team.Id {
					found = true
					break
				}
			}

			if !found && len(teams) > 0 {
				team = teams[0]
			} else {
				// in case the user hasn't joined any teams we send them to the select_team page
				team = &model.Team{Name: "select_team", DisplayName: utils.Cfg.TeamSettings.SiteName}
			}
		}
	}

	if *utils.Cfg.EmailSettings.EnableEmailBatching {
		var sendBatched bool

		if result := <-Srv.Store.Preference().Get(user.Id, model.PREFERENCE_CATEGORY_NOTIFICATIONS, model.PREFERENCE_NAME_EMAIL_INTERVAL); result.Err != nil {
			// if the call fails, assume it hasn't been set and use the default
			sendBatched = false
		} else {
			// default to not using batching if the setting is set to immediate
			sendBatched = result.Data.(model.Preference).Value != model.PREFERENCE_DEFAULT_EMAIL_INTERVAL
		}

		if sendBatched {
			if err := AddNotificationEmailToBatch(user, post, team); err == nil {
				return nil
			}
		}

		// fall back to sending a single email if we can't batch it for some reason
	}

	var channelName string
	var bodyText string
	var subjectText string
	var mailTemplate string
	var mailParameters map[string]interface{}

	teamURL := utils.GetSiteURL() + "/" + team.Name
	// post.CreateAt is in milliseconds since the epoch.
	tm := time.Unix(post.CreateAt/1000, 0)

	userLocale := utils.GetUserTranslations(user.Locale)
	month := userLocale(tm.Month().String())
	day := fmt.Sprintf("%d", tm.Day())
	year := fmt.Sprintf("%d", tm.Year())
	zone, _ := tm.Zone()

	// DMs and channel mentions use different subject templates and bodies.
	if channel.Type == model.CHANNEL_DIRECT {
		bodyText = userLocale("api.post.send_notifications_and_forget.message_body")
		subjectText = userLocale("api.post.send_notifications_and_forget.message_subject")

		senderDisplayName := senderName

		mailTemplate = "api.templates.post_subject_in_direct_message"
		mailParameters = map[string]interface{}{"SubjectText": subjectText, "TeamDisplayName": team.DisplayName, "SenderDisplayName": senderDisplayName, "Month": month, "Day": day, "Year": year}
	} else {
		bodyText = userLocale("api.post.send_notifications_and_forget.mention_body")
		subjectText = userLocale("api.post.send_notifications_and_forget.mention_subject")
		channelName = channel.DisplayName
		mailTemplate = "api.templates.post_subject_in_channel"
		mailParameters = map[string]interface{}{"SubjectText": subjectText, "TeamDisplayName": team.DisplayName, "ChannelName": channelName, "Month": month, "Day": day, "Year": year}
	}

	subject := fmt.Sprintf("[%v] %v", utils.Cfg.TeamSettings.SiteName, userLocale(mailTemplate, mailParameters))

	bodyPage := utils.NewHTMLTemplate("post_body", user.Locale)
	bodyPage.Props["SiteURL"] = utils.GetSiteURL()
	bodyPage.Props["PostMessage"] = GetMessageForNotification(post, userLocale)
	// Only deep-link to the post when the recipient has a real team for it.
	if team.Name != "select_team" {
		bodyPage.Props["TeamLink"] = teamURL + "/pl/" + post.Id
	} else {
		bodyPage.Props["TeamLink"] = teamURL
	}
	bodyPage.Props["BodyText"] = bodyText
	bodyPage.Props["Button"] = userLocale("api.templates.post_body.button")
	bodyPage.Html["Info"] = template.HTML(userLocale("api.templates.post_body.info", map[string]interface{}{"ChannelName": channelName, "SenderName": senderName, "Hour": fmt.Sprintf("%02d", tm.Hour()), "Minute": fmt.Sprintf("%02d", tm.Minute()), "TimeZone": zone, "Month": month, "Day": day}))

	if err := utils.SendMail(user.Email, html.UnescapeString(subject), bodyPage.Render()); err != nil {
		return err
	}

	if einterfaces.GetMetricsInterface() != nil {
		einterfaces.GetMetricsInterface().IncrementPostSentEmail()
	}

	return nil
}