Mirror of https://github.com/christianselig/apollo-backend (synced 2024-11-22 19:57:43 +00:00)
don't alert user posts in private subreddits
commit 009d60dc2f
parent 38e596b27e
4 changed files with 7374 additions and 17 deletions
7335	internal/reddit/testdata/user_posts.json (vendored, normal file)
File diff suppressed because one or more lines are too long
@@ -61,23 +61,24 @@ func NewMeResponse(val *fastjson.Value) interface{} {
 }
 
 type Thing struct {
 	Kind          string  `json:"kind"`
 	ID            string  `json:"id"`
 	Type          string  `json:"type"`
 	Author        string  `json:"author"`
 	Subject       string  `json:"subject"`
 	Body          string  `json:"body"`
 	CreatedAt     float64 `json:"created_utc"`
 	Context       string  `json:"context"`
 	ParentID      string  `json:"parent_id"`
 	LinkTitle     string  `json:"link_title"`
 	Destination   string  `json:"dest"`
 	Subreddit     string  `json:"subreddit"`
+	SubredditType string  `json:"subreddit_type"`
 	Score         int64   `json:"score"`
 	SelfText      string  `json:"selftext"`
 	Title         string  `json:"title"`
 	URL           string  `json:"url"`
 	Flair         string  `json:"flair"`
 }
 
 func (t *Thing) FullName() string {
@@ -102,6 +103,7 @@ func NewThing(val *fastjson.Value) *Thing {
 	t.LinkTitle = string(data.GetStringBytes("link_title"))
 	t.Destination = string(data.GetStringBytes("dest"))
 	t.Subreddit = string(data.GetStringBytes("subreddit"))
+	t.SubredditType = string(data.GetStringBytes("subreddit_type"))
 	t.Score = data.GetInt64("score")
 	t.Title = string(data.GetStringBytes("title"))
 
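For reference, the added `subreddit_type` lookup follows the same fastjson access pattern as the surrounding fields. Below is a minimal, self-contained sketch of that pattern using github.com/valyala/fastjson; the JSON payload and variable names are illustrative and not taken from the repository:

package main

import (
	"fmt"

	"github.com/valyala/fastjson"
)

func main() {
	// Hypothetical, trimmed-down post object; the real payload in the repo
	// is more complex and is only illustrated here.
	raw := []byte(`{"data": {"subreddit": "apolloapp", "subreddit_type": "private"}}`)

	var p fastjson.Parser
	val, err := p.ParseBytes(raw)
	if err != nil {
		panic(err)
	}

	// Same access pattern as the added line in NewThing:
	// t.SubredditType = string(data.GetStringBytes("subreddit_type"))
	data := val.Get("data")
	subredditType := string(data.GetStringBytes("subreddit_type"))
	fmt.Println(subredditType) // prints "private"
}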
@@ -128,3 +128,19 @@ func TestUserResponseParsing(t *testing.T) {
 	assert.Equal(t, "changelog", u.Name)
 	assert.Equal(t, true, u.AcceptFollowers)
 }
+
+func TestUserPostsParsing(t *testing.T) {
+	bb, err := ioutil.ReadFile("testdata/user_posts.json")
+	assert.NoError(t, err)
+
+	val, err := parser.ParseBytes(bb)
+	assert.NoError(t, err)
+
+	ret := NewListingResponse(val)
+	ps := ret.(*ListingResponse)
+	assert.NotNil(t, ps)
+
+	post := ps.Children[0]
+
+	assert.Equal(t, "public", post.SubredditType)
+}
@@ -220,6 +220,10 @@ func (uc *usersConsumer) Consume(delivery rmq.Delivery) {
 	}
 
 	for _, post := range posts.Children {
+		if post.SubredditType == "private" {
+			continue
+		}
+
 		notification := &apns2.Notification{}
 		notification.Topic = "com.christianselig.Apollo"
 		notification.Payload = payloadFromUserPost(post)
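The behavioural change is the early `continue` for private subreddits before any notification is built. A small standalone sketch of that filter follows; the `post` struct and `notifiable` helper are hypothetical, and only the `SubredditType == "private"` check comes from the diff:

package main

import "fmt"

// post is an illustrative stand-in for the parsed Thing above; only the
// SubredditType field corresponds to the actual change.
type post struct {
	Title         string
	SubredditType string
}

// notifiable reports whether a post should produce a push notification:
// posts in private subreddits are skipped, mirroring the `continue` added
// to the consumer loop.
func notifiable(p post) bool {
	return p.SubredditType != "private"
}

func main() {
	posts := []post{
		{Title: "visible post", SubredditType: "public"},
		{Title: "hidden post", SubredditType: "private"},
	}

	for _, p := range posts {
		if !notifiable(p) {
			continue
		}
		fmt.Println("would notify:", p.Title)
	}
}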