func TestDataCopySubscriptionsForUserAsJSON(t *testing.T) { pool := newConnPool(t) userID, err := data.CreateUser(pool, newUser()) if err != nil { t.Fatal(err) } buffer := &bytes.Buffer{} err = data.CopySubscriptionsForUserAsJSON(pool, buffer, userID) if err != nil { t.Fatalf("Failed when no subscriptions: %v", err) } err = data.InsertSubscription(pool, userID, "http://foo") if err != nil { t.Fatal(err) } buffer.Reset() err = data.CopySubscriptionsForUserAsJSON(pool, buffer, userID) if err != nil { t.Fatal(err) } if bytes.Contains(buffer.Bytes(), []byte("foo")) != true { t.Errorf("Expected %v, got %v", true, bytes.Contains(buffer.Bytes(), []byte("foo"))) } }
func TestDataSubscriptions(t *testing.T) { pool := newConnPool(t) userID, err := data.CreateUser(pool, newUser()) if err != nil { t.Fatal(err) } url := "http://foo" err = data.InsertSubscription(pool, userID, url) if err != nil { t.Fatal(err) } subscriptions, err := data.SelectSubscriptions(pool, userID) if err != nil { t.Fatal(err) } if len(subscriptions) != 1 { t.Fatalf("Found %d subscriptions, expected 1", len(subscriptions)) } if subscriptions[0].URL.Value != url { t.Fatalf("Expected %v, got %v", url, subscriptions[0].URL) } }
func CreateSubscriptionHandler(w http.ResponseWriter, req *http.Request, env *environment) { var subscription struct { URL string `json:"url"` } decoder := json.NewDecoder(req.Body) if err := decoder.Decode(&subscription); err != nil { w.WriteHeader(422) fmt.Fprintf(w, "Error decoding request: %v", err) return } if subscription.URL == "" { w.WriteHeader(422) fmt.Fprintln(w, `Request must include the attribute "url"`) return } if err := data.InsertSubscription(env.pool, env.user.ID.Value, subscription.URL); err != nil { w.WriteHeader(422) fmt.Fprintln(w, `Bad user name or password`) return } w.WriteHeader(http.StatusCreated) }
func TestDataDeleteSubscription(t *testing.T) { pool := newConnPool(t) userID, err := data.CreateUser(pool, newUser()) if err != nil { t.Fatal(err) } err = data.InsertSubscription(pool, userID, "http://foo") if err != nil { t.Fatal(err) } subscriptions, err := data.SelectSubscriptions(pool, userID) if err != nil { t.Fatal(err) } if len(subscriptions) != 1 { t.Fatalf("Found %d subscriptions, expected 1", len(subscriptions)) } feedID := subscriptions[0].FeedID.Value update := &data.ParsedFeed{Name: "baz", Items: []data.ParsedItem{ {URL: "http://baz/bar", Title: "Baz", PublicationTime: data.NewTime(time.Now())}, }} nullString := data.String{Status: data.Null} err = data.UpdateFeedWithFetchSuccess(pool, feedID, update, nullString, time.Now().Add(-20*time.Minute)) if err != nil { t.Fatal(err) } err = data.DeleteSubscription(pool, userID, feedID) if err != nil { t.Fatal(err) } subscriptions, err = data.SelectSubscriptions(pool, userID) if err != nil { t.Fatal(err) } if len(subscriptions) != 0 { t.Errorf("Found %d subscriptions, expected 0", len(subscriptions)) } // feed should have been deleted as it was the last user staleFeeds, err := data.GetFeedsUncheckedSince(pool, time.Now()) if err != nil { t.Fatal(err) } if len(staleFeeds) != 0 { t.Errorf("Found %d staleFeeds, expected 0", len(staleFeeds)) } }
func ImportFeedsHandler(w http.ResponseWriter, req *http.Request, env *environment) { file, _, err := req.FormFile("file") if err != nil { w.WriteHeader(422) fmt.Fprintln(w, `No uploaded file found`) return } defer file.Close() var doc OpmlDocument err = xml.NewDecoder(file).Decode(&doc) if err != nil { w.WriteHeader(422) fmt.Fprintln(w, "Error parsing OPML upload") return } type subscriptionResult struct { Title string `json:"title"` URL string `json:"url"` Success bool `json:"success"` } results := make([]subscriptionResult, 0, len(doc.Body.Outlines)) resultsChan := make(chan subscriptionResult) for _, outline := range doc.Body.Outlines { go func(outline OpmlOutline) { r := subscriptionResult{Title: outline.Title, URL: outline.URL} err := data.InsertSubscription(env.pool, env.user.ID.Value, outline.URL) r.Success = err == nil resultsChan <- r }(outline) } for _ = range doc.Body.Outlines { r := <-resultsChan results = append(results, r) } w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(results) }
// TestExportOPML exercises ExportFeedsHandler end to end: it creates a user
// with one subscription, invokes the handler, and asserts both the HTTP
// status and the exact OPML body produced.
func TestExportOPML(t *testing.T) {
	pool := newConnPool(t)

	userID, err := data.CreateUser(pool, &data.User{
		Name:           data.NewString("test"),
		Email:          data.NewString("*****@*****.**"),
		PasswordDigest: data.NewBytes([]byte("digest")),
		PasswordSalt:   data.NewBytes([]byte("salt")),
	})
	if err != nil {
		t.Fatal(err)
	}

	err = data.InsertSubscription(pool, userID, "http://example.com/feed.rss")
	if err != nil {
		t.Fatal(err)
	}

	req, err := http.NewRequest("GET", "http://example.com/", nil)
	if err != nil {
		t.Fatal(err)
	}

	// The handler reads the authenticated user from the environment rather
	// than the request, so inject one directly.
	env := &environment{pool: pool}
	env.user = &data.User{ID: data.NewInt32(userID), Name: data.NewString("test")}

	w := httptest.NewRecorder()
	ExportFeedsHandler(w, req, env)

	if w.Code != 200 {
		t.Fatalf("Expected HTTP status 200, instead received %d", w.Code)
	}

	// Full-document comparison: the export title embeds the user name and the
	// single outline mirrors the subscribed feed URL in text, title and xmlUrl.
	expected := `<?xml version="1.0" encoding="UTF-8"?>
<opml version="1.0"><head><title>The Pithy Reader Export for test</title></head><body><outline text="http://example.com/feed.rss" title="http://example.com/feed.rss" type="rss" xmlUrl="http://example.com/feed.rss"></outline></body></opml>`
	if w.Body.String() != expected {
		t.Fatalf("Expected:\n%s\nGot:\n%s\n", expected, w.Body.String())
	}
}
// This function is a nasty copy and paste of testRepositoryUpdateFeedWithFetchSuccess // Fix me when refactoring tests func TestDataUpdateFeedWithFetchSuccessWithoutPublicationTime(t *testing.T) { pool := newConnPool(t) userID, err := data.CreateUser(pool, newUser()) if err != nil { t.Fatal(err) } now := time.Now() url := "http://bar" err = data.InsertSubscription(pool, userID, url) if err != nil { t.Fatal(err) } subscriptions, err := data.SelectSubscriptions(pool, userID) if err != nil { t.Fatal(err) } if len(subscriptions) != 1 { t.Fatalf("Found %d subscriptions, expected 1", len(subscriptions)) } feedID := subscriptions[0].FeedID.Value update := &data.ParsedFeed{Name: "baz", Items: []data.ParsedItem{ {URL: "http://baz/bar", Title: "Baz"}, }} nullString := data.String{Status: data.Null} err = data.UpdateFeedWithFetchSuccess(pool, feedID, update, nullString, now) if err != nil { t.Fatal(err) } buffer := &bytes.Buffer{} err = data.CopyUnreadItemsAsJSONByUserID(pool, buffer, userID) if err != nil { t.Fatal(err) } type UnreadItemsFromJSON struct { ID int32 `json:id` } var unreadItems []UnreadItemsFromJSON err = json.Unmarshal(buffer.Bytes(), &unreadItems) if err != nil { t.Fatal(err) } if len(unreadItems) != 1 { t.Fatalf("Found %d unreadItems, expected 1", len(unreadItems)) } // Update again and ensure item does not get created again err = data.UpdateFeedWithFetchSuccess(pool, feedID, update, nullString, now) if err != nil { t.Fatal(err) } buffer.Reset() err = data.CopyUnreadItemsAsJSONByUserID(pool, buffer, userID) if err != nil { t.Fatal(err) } err = json.Unmarshal(buffer.Bytes(), &unreadItems) if err != nil { t.Fatal(err) } if len(unreadItems) != 1 { t.Fatalf("Found %d unreadItems, expected 1", len(unreadItems)) } }
func TestDataFeeds(t *testing.T) { pool := newConnPool(t) userID, err := data.CreateUser(pool, newUser()) if err != nil { t.Fatal(err) } now := time.Now() fiveMinutesAgo := now.Add(-5 * time.Minute) tenMinutesAgo := now.Add(-10 * time.Minute) fifteenMinutesAgo := now.Add(-15 * time.Minute) update := &data.ParsedFeed{Name: "baz", Items: make([]data.ParsedItem, 0)} // Create a feed url := "http://bar" err = data.InsertSubscription(pool, userID, url) if err != nil { t.Fatal(err) } // A new feed has never been fetched -- it should need fetching staleFeeds, err := data.GetFeedsUncheckedSince(pool, tenMinutesAgo) if err != nil { t.Fatal(err) } if len(staleFeeds) != 1 { t.Fatalf("Found %d stale feed, expected 1", len(staleFeeds)) } if staleFeeds[0].URL.Value != url { t.Errorf("Expected %v, got %v", url, staleFeeds[0].URL) } feedID := staleFeeds[0].ID.Value nullString := data.String{Status: data.Null} // Update feed as of now err = data.UpdateFeedWithFetchSuccess(pool, feedID, update, nullString, now) if err != nil { t.Fatal(err) } // feed should no longer be stale staleFeeds, err = data.GetFeedsUncheckedSince(pool, tenMinutesAgo) if err != nil { t.Fatal(err) } if len(staleFeeds) != 0 { t.Fatalf("Found %d stale feed, expected 0", len(staleFeeds)) } // Update feed to be old enough to need refresh err = data.UpdateFeedWithFetchSuccess(pool, feedID, update, nullString, fifteenMinutesAgo) if err != nil { t.Fatal(err) } // It should now need fetching staleFeeds, err = data.GetFeedsUncheckedSince(pool, tenMinutesAgo) if err != nil { t.Fatal(err) } if len(staleFeeds) != 1 { t.Fatalf("Found %d stale feed, expected 1", len(staleFeeds)) } if staleFeeds[0].ID.Value != feedID { t.Errorf("Expected %v, got %v", feedID, staleFeeds[0].ID) } // But update feed with a recent failed fetch err = data.UpdateFeedWithFetchFailure(pool, feedID, "something went wrong", fiveMinutesAgo) if err != nil { t.Fatal(err) } // feed should no longer be stale staleFeeds, err = 
data.GetFeedsUncheckedSince(pool, tenMinutesAgo) if err != nil { t.Fatal(err) } if len(staleFeeds) != 0 { t.Fatalf("Found %d stale feed, expected 0", len(staleFeeds)) } }