-
Notifications
You must be signed in to change notification settings - Fork 1
Notifications endpoint #53
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Merged
Merged
Changes from all commits
Commits
Show all changes
14 commits
Select commit
Hold shift + click to select a range
40af715
replace numeric ids with hash ids in json.
stereosteve 17ca1dc
notifs kind of work.
stereosteve 5d06591
Merge branch 'trashify' into sp-notifs
stereosteve 8f8a306
hashids in notifs
stereosteve 70cf7d0
HashifyJson... a name you can trust!
stereosteve 3e7e609
Merge remote-tracking branch 'origin/main' into sp-notifs
stereosteve 53ea4d6
order notifs
stereosteve 0f2a252
notifs: unread count
stereosteve c2d734d
Match null behavior for seen_at
stereosteve dccf054
move notif query out of sqlc
stereosteve 2c0a2f2
support valid_types param
stereosteve 320bf8e
fix unread count (ignore limit=0)
stereosteve 176e67b
notif playlist_id is a string in the swagger
stereosteve baf6922
cleanup
stereosteve File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,173 @@ | ||
| package api | ||
|
|
||
| import ( | ||
| "encoding/json" | ||
| "slices" | ||
|
|
||
| "bridgerton.audius.co/trashid" | ||
| "github.com/gofiber/fiber/v2" | ||
| "github.com/jackc/pgx/v5" | ||
| ) | ||
|
|
||
| func (app *ApiServer) v1Notifications(c *fiber.Ctx) error { | ||
|
|
||
| sql := ` | ||
| WITH user_seen as ( | ||
| SELECT | ||
| LAG(seen_at, 1, now()::timestamp) OVER ( ORDER BY seen_at desc ) AS seen_at, | ||
| seen_at as prev_seen_at | ||
| FROM | ||
| notification_seen | ||
| WHERE | ||
| user_id = @user_id | ||
| ORDER BY | ||
| seen_at desc | ||
| ), | ||
| user_created_at as ( | ||
| SELECT | ||
| created_at | ||
| FROM | ||
| users | ||
| WHERE | ||
| user_id = @user_id | ||
| AND is_current | ||
| ) | ||
| SELECT | ||
| n.type, | ||
| n.group_id as group_id, | ||
| json_agg( | ||
| json_build_object( | ||
| 'type', type, | ||
| 'specifier', specifier, | ||
| 'timestamp', EXTRACT(EPOCH FROM timestamp), | ||
| 'data', data | ||
| ) | ||
| ORDER BY timestamp DESC | ||
| )::jsonb as actions, | ||
| CASE | ||
| WHEN user_seen.seen_at is not NULL THEN now()::timestamp != user_seen.seen_at | ||
| ELSE EXISTS(SELECT 1 from notification_seen ns where ns.user_id = @user_id) | ||
| END::boolean as is_seen, | ||
|
|
||
| CASE | ||
| WHEN user_seen.seen_at != now()::timestamp THEN EXTRACT(EPOCH FROM user_seen.seen_at) | ||
| ELSE null | ||
| END AS seen_at | ||
|
|
||
| FROM | ||
| notification n | ||
| LEFT JOIN user_seen on | ||
| user_seen.seen_at >= n.timestamp and user_seen.prev_seen_at < n.timestamp | ||
| WHERE | ||
| ((ARRAY[@user_id] && n.user_ids) OR (n.type = 'announcement' AND n.timestamp > (SELECT created_at FROM user_created_at))) | ||
| AND n.type = ANY(@valid_types) | ||
| GROUP BY | ||
| n.type, n.group_id, user_seen.seen_at, user_seen.prev_seen_at | ||
| ORDER BY | ||
| user_seen.seen_at desc NULLS LAST, | ||
| max(n.timestamp) desc, | ||
| n.group_id desc | ||
| limit @limit::int | ||
| ; | ||
| ` | ||
|
|
||
| // default types are always enabled | ||
| validTypes := []string{ | ||
|
stereosteve marked this conversation as resolved.
|
||
| "follow", | ||
| "repost", | ||
| "save", | ||
| "tip_send", | ||
| "tip_receive", | ||
| "track_added_to_purchased_album", | ||
| "track_added_to_playlist", | ||
| "tastemaker", | ||
| "supporter_rank_up", | ||
| "supporting_rank_up", | ||
| "supporter_dethroned", | ||
| "challenge_reward", | ||
| "claimable_reward", | ||
| "tier_change", | ||
| "create", | ||
| "remix", | ||
| "cosign", | ||
| "trending_playlist", | ||
| "trending", | ||
| "trending_underground", | ||
| "milestone", | ||
| "announcement", | ||
| "reaction", | ||
| "repost_of_repost", | ||
| "save_of_repost", | ||
| "usdc_purchase_seller", | ||
| "usdc_purchase_buyer", | ||
| "request_manager", | ||
| "approve_manager_request", | ||
| "comment", | ||
| "comment_thread", | ||
| "comment_mention", | ||
| "comment_reaction", | ||
| "listen_streak_reminder", | ||
| "fan_remix_contest_ended", | ||
| "artist_remix_contest_ended", | ||
| "artist_remix_contest_ending_soon", | ||
| "fan_remix_contest_ending_soon", | ||
| "fan_remix_contest_winners_selected", | ||
| "fan_remix_contest_started", | ||
| "artist_remix_contest_submissions", | ||
| } | ||
|
|
||
| // add optional valid_types | ||
| for _, t := range queryMulti(c, "valid_types") { | ||
| if !slices.Contains(validTypes, t) { | ||
| validTypes = append(validTypes, t) | ||
| } | ||
| } | ||
|
|
||
| userId := app.getUserId(c) | ||
| limit := c.QueryInt("limit", 20) | ||
|
|
||
| // python returns 20 items when limit=0 | ||
| // and client relies on this for showing unread count | ||
| if limit == 0 { | ||
| limit = 20 | ||
| } | ||
|
|
||
| type GetNotifsRow struct { | ||
| Type string `json:"type"` | ||
| GroupID string `json:"group_id"` | ||
| Actions json.RawMessage `json:"actions"` | ||
| IsSeen bool `json:"is_seen"` | ||
| SeenAt interface{} `json:"seen_at"` | ||
| } | ||
|
|
||
| rows, err := app.pool.Query(c.Context(), sql, pgx.NamedArgs{ | ||
| "user_id": userId, | ||
| "limit": limit, | ||
| "valid_types": validTypes, | ||
| }) | ||
| if err != nil { | ||
| return err | ||
| } | ||
|
|
||
| notifs, err := pgx.CollectRows(rows, pgx.RowToStructByNameLax[GetNotifsRow]) | ||
| if err != nil { | ||
| return err | ||
| } | ||
|
|
||
| unreadCount := 0 | ||
| for idx, notif := range notifs { | ||
| notif.Actions = trashid.HashifyJson(notif.Actions) | ||
| notifs[idx] = notif | ||
| if !notif.IsSeen { | ||
| unreadCount++ | ||
| } | ||
| } | ||
|
|
||
| return c.JSON(fiber.Map{ | ||
| "data": fiber.Map{ | ||
| "notifications": notifs, | ||
| "unread_count": unreadCount, | ||
| }, | ||
| }) | ||
|
|
||
| } | ||
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,43 @@ | ||
| package trashid | ||
|
|
||
| import ( | ||
| "bytes" | ||
| "fmt" | ||
| "regexp" | ||
| "strconv" | ||
| ) | ||
|
|
||
// re matches a JSON key/value pair whose key looks id-like ("<word>_id",
// "id", or "specifier") and whose value is a bare integer.
// NOTE(review): this is a textual match over raw JSON bytes; in principle it
// could also fire on id-like text embedded inside JSON string values —
// presumably acceptable because inputs are trusted notification payloads;
// confirm.
var re = regexp.MustCompile(`"(?P<key>\w+_id|id|specifier)"\s*:\s*(?P<val>\d+)`)

// skipKeys lists id-like keys whose numeric values must NOT be hash-encoded.
var skipKeys = [][]byte{
	[]byte(`special_id`),
}
|
|
||
| func HashifyJson(jsonBytes []byte) []byte { | ||
| return re.ReplaceAllFunc(jsonBytes, func(match []byte) []byte { | ||
| submatches := re.FindSubmatchIndex(match) | ||
| if submatches == nil || len(submatches) < 6 { | ||
| return match | ||
| } | ||
|
|
||
| // Extract key and value from match using named groups | ||
| key := match[submatches[2]:submatches[3]] | ||
| for _, skipKey := range skipKeys { | ||
| if bytes.Equal(key, skipKey) { | ||
| return match | ||
| } | ||
| } | ||
|
|
||
| val := match[submatches[4]:submatches[5]] | ||
| num, err := strconv.Atoi(string(val)) | ||
| if err != nil { | ||
| return match | ||
| } | ||
|
|
||
| // Replace with hex string | ||
| hashed, err := EncodeHashId(num) | ||
| if err != nil { | ||
| return match | ||
| } | ||
| return []byte(fmt.Sprintf(`"%s": "%s"`, key, hashed)) | ||
| }) | ||
| } |
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,63 @@ | ||
| package trashid | ||
|
|
||
| import ( | ||
| "encoding/json" | ||
| "testing" | ||
|
|
||
| "github.com/stretchr/testify/assert" | ||
| "github.com/tidwall/gjson" | ||
| ) | ||
|
|
||
| func TestHashifyJson(t *testing.T) { | ||
| j1 := []byte(` | ||
| { | ||
| "data": { | ||
| "id": 1, | ||
| "user_id": 2, | ||
| "special_id": 999, | ||
| "tracks": [ | ||
| { | ||
| "id": 3, | ||
| "title": "fun", | ||
| "value": "id", | ||
| "other": "user_id", | ||
| "good_idea": 333, | ||
| "ida": 111 | ||
| }, | ||
| { | ||
| "id": 4, | ||
| "title": "fun", | ||
| "value": "id", | ||
| "other": "user_id", | ||
| "good_idea": 333, | ||
| "ida": 111 | ||
| } | ||
| ] | ||
| } | ||
| } | ||
| `) | ||
|
|
||
| var m map[string]any | ||
| err := json.Unmarshal(j1, &m) | ||
| assert.NoError(t, err) | ||
|
|
||
| j2 := HashifyJson(j1) | ||
|
|
||
| expectations := map[string]string{ | ||
| "data.id": "7eP5n", | ||
| "data.user_id": "ML51L", | ||
| "data.special_id": "999", | ||
| "data.tracks.0.id": "lebQD", | ||
| "data.tracks.0.value": "id", | ||
| "data.tracks.0.other": "user_id", | ||
| "data.tracks.0.good_idea": "333", | ||
| "data.tracks.1.id": "ELKzn", | ||
| } | ||
| for path, exp := range expectations { | ||
| assert.Equal(t, exp, gjson.GetBytes(j2, path).String()) | ||
| } | ||
|
|
||
| err = json.Unmarshal(j2, &m) | ||
| assert.NoError(t, err) | ||
|
|
||
| } |
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
I didn't know about LAG... that's super cool, though isn't this backwards? shouldn't the lag be prev_seen_at?
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Yeah this does look backwards... it was mostly taken verbatim from get_notifications.py
I think it would be more correct to just flip the names tho... I'll try that out.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Ok, I get it now... the query is ordered by `seen_at desc`, so with that ordering, the row before the current row has a greater timestamp.
So I think the logic and naming is OK.