diff --git a/public/css/fontello.css b/public/css/fontello.css index d022bb5..2453575 100644 --- a/public/css/fontello.css +++ b/public/css/fontello.css @@ -1,16 +1,15 @@ @font-face { font-family: 'fontello'; - src: url('/fonts/fontello.eot?21002321'); - src: url('/fonts/fontello.eot?21002321#iefix') format('embedded-opentype'), - url('/fonts/fontello.woff2?21002321') format('woff2'), - url('/fonts/fontello.woff?21002321') format('woff'), - url('/fonts/fontello.ttf?21002321') format('truetype'), - url('/fonts/fontello.svg?21002321#fontello') format('svg'); + src: url('/fonts/fontello.eot?61663884'); + src: url('/fonts/fontello.eot?61663884#iefix') format('embedded-opentype'), + url('/fonts/fontello.woff2?61663884') format('woff2'), + url('/fonts/fontello.woff?61663884') format('woff'), + url('/fonts/fontello.ttf?61663884') format('truetype'), + url('/fonts/fontello.svg?61663884#fontello') format('svg'); font-weight: normal; font-style: normal; } - - [class^="icon-"]:before, [class*=" icon-"]:before { +[class^="icon-"]:before, [class*=" icon-"]:before { font-family: "fontello"; font-style: normal; font-weight: normal; @@ -32,22 +31,23 @@ -webkit-font-smoothing: antialiased; -moz-osx-font-smoothing: grayscale; } - -.icon-heart:before { content: '\2665'; } /* '♥' */ -.icon-quote:before { content: '\275e'; } /* '❞' */ -.icon-comment:before { content: '\e802'; } /* '' */ -.icon-ok:before { content: '\e803'; } /* '' */ -.icon-play:before { content: '\e804'; } /* '' */ -.icon-link:before { content: '\e805'; } /* '' */ -.icon-calendar:before { content: '\e806'; } /* '' */ -.icon-location:before { content: '\e807'; } /* '' */ + +.icon-views:before { content: '\e800'; } /* '' */ +.icon-heart:before { content: '\e801'; } /* '' */ +.icon-quote:before { content: '\e802'; } /* '' */ +.icon-comment:before { content: '\e803'; } /* '' */ +.icon-ok:before { content: '\e804'; } /* '' */ +.icon-play:before { content: '\e805'; } /* '' */ +.icon-link:before { content: '\e806'; } /* '' */ +.icon-calendar:before { content: '\e807'; } /* '' */ +.icon-location:before { content: '\e808'; } /* '' */ .icon-picture:before { content: '\e809'; } /* '' */ .icon-lock:before { content: '\e80a'; } /* '' */ .icon-down:before { content: '\e80b'; } /* '' */ -.icon-retweet:before { content: '\e80d'; } /* '' */ -.icon-search:before { content: '\e80e'; } /* '' */ -.icon-pin:before { content: '\e80f'; } /* '' */ -.icon-cog:before { content: '\e812'; } /* '' */ -.icon-rss-feed:before { content: '\e813'; } /* '' */ +.icon-retweet:before { content: '\e80c'; } /* '' */ +.icon-search:before { content: '\e80d'; } /* '' */ +.icon-pin:before { content: '\e80e'; } /* '' */ +.icon-cog:before { content: '\e80f'; } /* '' */ +.icon-rss:before { content: '\e810'; } /* '' */ .icon-info:before { content: '\f128'; } /* '' */ .icon-bird:before { content: '\f309'; } /* '' */ diff --git a/public/fonts/LICENSE.txt b/public/fonts/LICENSE.txt index c8d90ff..41f18a8 100644 --- a/public/fonts/LICENSE.txt +++ b/public/fonts/LICENSE.txt @@ -1,6 +1,15 @@ Font license info +## Modern Pictograms + + Copyright (c) 2012 by John Caserta. All rights reserved. + + Author: John Caserta + License: SIL (http://scripts.sil.org/OFL) + Homepage: http://thedesignoffice.org/project/modern-pictograms/ + + ## Entypo Copyright (C) 2012 by Daniel Bruce @@ -37,12 +46,3 @@ Font license info Homepage: http://aristeides.com/ -## Modern Pictograms - - Copyright (c) 2012 by John Caserta. All rights reserved. 
- - Author: John Caserta - License: SIL (http://scripts.sil.org/OFL) - Homepage: http://thedesignoffice.org/project/modern-pictograms/ - - diff --git a/public/fonts/fontello.eot b/public/fonts/fontello.eot index aaddd6b..2b2982a 100644 Binary files a/public/fonts/fontello.eot and b/public/fonts/fontello.eot differ diff --git a/public/fonts/fontello.svg b/public/fonts/fontello.svg index 1f30ccc..2a64343 100644 --- a/public/fonts/fontello.svg +++ b/public/fonts/fontello.svg @@ -1,26 +1,28 @@ -Copyright (C) 2020 by original authors @ fontello.com +Copyright (C) 2025 by original authors @ fontello.com - + - + - + - + - + - + - + - + + + @@ -28,19 +30,19 @@ - + - + - + - + - + - \ No newline at end of file + diff --git a/public/fonts/fontello.ttf b/public/fonts/fontello.ttf index 29f1ec6..ef775f8 100644 Binary files a/public/fonts/fontello.ttf and b/public/fonts/fontello.ttf differ diff --git a/public/fonts/fontello.woff b/public/fonts/fontello.woff index 8428cf8..63c3c23 100644 Binary files a/public/fonts/fontello.woff and b/public/fonts/fontello.woff differ diff --git a/public/fonts/fontello.woff2 b/public/fonts/fontello.woff2 index 551f49d..b7541f0 100644 Binary files a/public/fonts/fontello.woff2 and b/public/fonts/fontello.woff2 differ diff --git a/src/api.nim b/src/api.nim index c0efa58..aeb0f17 100644 --- a/src/api.nim +++ b/src/api.nim @@ -7,12 +7,39 @@ import experimental/parser as newParser proc mediaUrl(id: string; cursor: string): SessionAwareUrl = let cookieVariables = userMediaVariables % [id, cursor] - oauthVariables = userTweetsVariables % [id, cursor] + oauthVariables = restIdVariables % [id, cursor] result = SessionAwareUrl( cookieUrl: graphUserMedia ? {"variables": cookieVariables, "features": gqlFeatures}, oauthUrl: graphUserMediaV2 ? {"variables": oauthVariables, "features": gqlFeatures} ) +proc userTweetsUrl(id: string; cursor: string): SessionAwareUrl = + let + cookieVariables = userTweetsVariables % [id, cursor] + oauthVariables = restIdVariables % [id, cursor] + result = SessionAwareUrl( + cookieUrl: graphUserTweets ? {"variables": cookieVariables, "features": gqlFeatures, "fieldToggles": fieldToggles}, + oauthUrl: graphUserTweetsV2 ? {"variables": oauthVariables, "features": gqlFeatures} + ) + +proc userTweetsAndRepliesUrl(id: string; cursor: string): SessionAwareUrl = + let + cookieVariables = userTweetsAndRepliesVariables % [id, cursor] + oauthVariables = restIdVariables % [id, cursor] + result = SessionAwareUrl( + cookieUrl: graphUserTweetsAndReplies ? {"variables": cookieVariables, "features": gqlFeatures, "fieldToggles": fieldToggles}, + oauthUrl: graphUserTweetsAndRepliesV2 ? {"variables": oauthVariables, "features": gqlFeatures} + ) + +proc tweetDetailUrl(id: string; cursor: string): SessionAwareUrl = + let + cookieVariables = tweetDetailVariables % [id, cursor] + oauthVariables = tweetVariables % [id, cursor] + result = SessionAwareUrl( + cookieUrl: graphTweetDetail ? {"variables": cookieVariables, "features": gqlFeatures, "fieldToggles": tweetDetailFieldToggles}, + oauthUrl: graphTweet ? 
{"variables": oauthVariables, "features": gqlFeatures} + ) + proc getGraphUser*(username: string): Future[User] {.async.} = if username.len == 0: return let @@ -33,13 +60,11 @@ proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profi if id.len == 0: return let cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: "" - variables = userTweetsVariables % [id, cursor] - params = {"variables": variables, "features": gqlFeatures} js = case kind of TimelineKind.tweets: - await fetch(graphUserTweets ? params, Api.userTweets) + await fetch(userTweetsUrl(id, cursor), Api.userTweets) of TimelineKind.replies: - await fetch(graphUserTweetsAndReplies ? params, Api.userTweetsAndReplies) + await fetch(userTweetsAndRepliesUrl(id, cursor), Api.userTweetsAndReplies) of TimelineKind.media: await fetch(mediaUrl(id, cursor), Api.userMedia) result = parseGraphTimeline(js, after) @@ -48,7 +73,7 @@ proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} = if id.len == 0: return let cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: "" - variables = listTweetsVariables % [id, cursor] + variables = restIdVariables % [id, cursor] params = {"variables": variables, "features": gqlFeatures} js = await fetch(graphListTweets ? params, Api.listTweets) result = parseGraphTimeline(js, after).tweets @@ -94,9 +119,7 @@ proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} = if id.len == 0: return let cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: "" - variables = tweetVariables % [id, cursor] - params = {"variables": variables, "features": gqlFeatures} - js = await fetch(graphTweet ? params, Api.tweetDetail) + js = await fetch(tweetDetailUrl(id, cursor), Api.tweetDetail) result = parseGraphConversation(js, id) proc getReplies*(id, after: string): Future[Result[Chain]] {.async.} = diff --git a/src/apiutils.nim b/src/apiutils.nim index ae459fa..defffd1 100644 --- a/src/apiutils.nim +++ b/src/apiutils.nim @@ -60,11 +60,11 @@ proc getAndValidateSession*(api: Api): Future[Session] {.async.} = case result.kind of SessionKind.oauth: if result.oauthToken.len == 0: - echo "[sessions] Empty oauth token, session: ", result.id + echo "[sessions] Empty oauth token, session: ", result.pretty raise rateLimitError() of SessionKind.cookie: if result.authToken.len == 0 or result.ct0.len == 0: - echo "[sessions] Empty cookie credentials, session: ", result.id + echo "[sessions] Empty cookie credentials, session: ", result.pretty raise rateLimitError() template fetchImpl(result, fetchBody) {.dirty.} = @@ -107,7 +107,7 @@ template fetchImpl(result, fetchBody) {.dirty.} = setLimited(session, api) raise rateLimitError() elif result.startsWith("429 Too Many Requests"): - echo "[sessions] 429 error, API: ", api, ", session: ", session.id + echo "[sessions] 429 error, API: ", api, ", session: ", session.pretty session.apis[api].remaining = 0 # rate limit hit, resets after the 15 minute window raise rateLimitError() @@ -124,8 +124,8 @@ template fetchImpl(result, fetchBody) {.dirty.} = except OSError as e: raise e except Exception as e: - let id = if session.isNil: "null" else: $session.id - echo "error: ", e.name, ", msg: ", e.msg, ", sessionId: ", id, ", url: ", url + let s = session.pretty + echo "error: ", e.name, ", msg: ", e.msg, ", session: ", s, ", url: ", url raise rateLimitError() finally: release(session) diff --git a/src/auth.nim b/src/auth.nim index 9f9fe8a..734b43e 100644 --- a/src/auth.nim +++ b/src/auth.nim @@ -7,20 +7,6 @@ import 
experimental/parser/session const maxConcurrentReqs = 2 hourInSeconds = 60 * 60 - apiMaxReqs: Table[Api, int] = { - Api.search: 50, - Api.tweetDetail: 500, - Api.userTweets: 500, - Api.userTweetsAndReplies: 500, - Api.userMedia: 500, - Api.userRestId: 500, - Api.userScreenName: 500, - Api.tweetResult: 500, - Api.list: 500, - Api.listTweets: 500, - Api.listMembers: 500, - Api.listBySlug: 500 - }.toTable var sessionPool: seq[Session] @@ -29,6 +15,20 @@ var template log(str: varargs[string, `$`]) = echo "[sessions] ", str.join("") +proc pretty*(session: Session): string = + if session.isNil: + return "" + + if session.id > 0 and session.username.len > 0: + result = $session.id & " (" & session.username & ")" + elif session.username.len > 0: + result = session.username + elif session.id > 0: + result = $session.id + else: + result = "" + result = $session.kind & " " & result + proc snowflakeToEpoch(flake: int64): int64 = int64(((flake shr 22) + 1288834974657) div 1000) @@ -57,8 +57,7 @@ proc getSessionPoolHealth*(): JsonNode = for api in session.apis.keys: let apiStatus = session.apis[api] - limit = if apiStatus.limit > 0: apiStatus.limit else: apiMaxReqs.getOrDefault(api, 0) - reqs = limit - apiStatus.remaining + reqs = apiStatus.limit - apiStatus.remaining # no requests made with this session and endpoint since the limit reset if apiStatus.reset < now: @@ -130,7 +129,7 @@ proc isLimited(session: Session; api: Api): bool = if session.limited and api != Api.userTweets: if (epochTime().int - session.limitedAt) > hourInSeconds: session.limited = false - log "resetting limit: ", session.id + log "resetting limit: ", session.pretty return false else: return true @@ -146,7 +145,7 @@ proc isReady(session: Session; api: Api): bool = proc invalidate*(session: var Session) = if session.isNil: return - log "invalidating: ", session.id + log "invalidating: ", session.pretty # TODO: This isn't sufficient, but it works for now let idx = sessionPool.find(session) @@ -171,7 +170,7 @@ proc getSession*(api: Api): Future[Session] {.async.} = proc setLimited*(session: Session; api: Api) = session.limited = true session.limitedAt = epochTime().int - log "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", id: ", session.id + log "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", ", session.pretty proc setRateLimit*(session: Session; api: Api; remaining, reset, limit: int) = # avoid undefined behavior in race conditions diff --git a/src/consts.nim b/src/consts.nim index c8ae8d2..2623484 100644 --- a/src/consts.nim +++ b/src/consts.nim @@ -9,16 +9,19 @@ const graphUser* = gql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery" graphUserById* = gql / "oPppcargziU1uDQHAUmH-A/UserResultByIdQuery" - graphUserTweets* = gql / "JLApJKFY0MxGTzCoK6ps8Q/UserWithProfileTweetsQueryV2" - graphUserTweetsAndReplies* = gql / "Y86LQY7KMvxn5tu3hFTyPg/UserWithProfileTweetsAndRepliesQueryV2" + graphUserTweetsV2* = gql / "JLApJKFY0MxGTzCoK6ps8Q/UserWithProfileTweetsQueryV2" + graphUserTweetsAndRepliesV2* = gql / "Y86LQY7KMvxn5tu3hFTyPg/UserWithProfileTweetsAndRepliesQueryV2" + graphUserTweets* = gql / "oRJs8SLCRNRbQzuZG93_oA/UserTweets" + graphUserTweetsAndReplies* = gql / "kkaJ0Mf34PZVarrxzLihjg/UserTweetsAndReplies" graphUserMedia* = gql / "36oKqyQ7E_9CmtONGjJRsA/UserMedia" graphUserMediaV2* = gql / "PDfFf8hGeJvUCiTyWtw4wQ/MediaTimelineV2" graphTweet* = gql / "Vorskcd2tZ-tc4Gx3zbk4Q/ConversationTimelineV2" + graphTweetDetail* = gql / "YVyS4SfwYW7Uw5qwy0mQCA/TweetDetail" 
graphTweetResult* = gql / "sITyJdhRPpvpEjg4waUmTA/TweetResultByIdQuery" - graphSearchTimeline* = gql / "KI9jCXUx3Ymt-hDKLOZb9Q/SearchTimeline" - graphListById* = gql / "oygmAig8kjn0pKsx_bUadQ/ListByRestId" - graphListBySlug* = gql / "88GTz-IPPWLn1EiU8XoNVg/ListBySlug" - graphListMembers* = gql / "kSmxeqEeelqdHSR7jMnb_w/ListMembers" + graphSearchTimeline* = gql / "7r8ibjHuK3MWUyzkzHNMYQ/SearchTimeline" + graphListById* = gql / "cIUpT1UjuGgl_oWiY7Snhg/ListByRestId" + graphListBySlug* = gql / "K6wihoTiTrzNzSF8y1aeKQ/ListBySlug" + graphListMembers* = gql / "fuVHh5-gFn8zDBBxb8wOMA/ListMembers" graphListTweets* = gql / "BbGLL1ZfMibdFNWlk7a0Pw/ListTimeline" gqlFeatures* = """{ @@ -96,24 +99,20 @@ const "withV2Timeline": true }""".replace(" ", "").replace("\n", "") -# oldUserTweetsVariables* = """{ -# "userId": "$1", $2 -# "count": 20, -# "includePromotedContent": false, -# "withDownvotePerspective": false, -# "withReactionsMetadata": false, -# "withReactionsPerspective": false, -# "withVoice": false, -# "withV2Timeline": true -# } -# """ + tweetDetailVariables* = """{ + "focalTweetId": "$1", + $2 + "referrer": "profile", + "with_rux_injections": false, + "rankingMode": "Relevance", + "includePromotedContent": true, + "withCommunity": true, + "withQuickPromoteEligibilityTweetFields": true, + "withBirdwatchNotes": true, + "withVoice": true +}""".replace(" ", "").replace("\n", "") - userTweetsVariables* = """{ - "rest_id": "$1", $2 - "count": 20 -}""" - - listTweetsVariables* = """{ + restIdVariables* = """{ "rest_id": "$1", $2 "count": 20 }""" @@ -126,3 +125,22 @@ const "withBirdwatchNotes": false, "withVoice": true }""".replace(" ", "").replace("\n", "") + + userTweetsVariables* = """{ + "userId": "$1", $2 + "count": 20, + "includePromotedContent": false, + "withQuickPromoteEligibilityTweetFields": true, + "withVoice": true +}""".replace(" ", "").replace("\n", "") + + userTweetsAndRepliesVariables* = """{ + "userId": "$1", $2 + "count": 20, + "includePromotedContent": false, + "withCommunity": true, + "withVoice": true +}""".replace(" ", "").replace("\n", "") + + fieldToggles* = """{"withArticlePlainText":false}""" + tweetDetailFieldToggles* = """{"withArticleRichContentState":true,"withArticlePlainText":false,"withGrokAnalyze":false,"withDisallowedReplyControls":false}""" diff --git a/src/experimental/parser/graphql.nim b/src/experimental/parser/graphql.nim index 69837ab..045a5d6 100644 --- a/src/experimental/parser/graphql.nim +++ b/src/experimental/parser/graphql.nim @@ -1,21 +1,39 @@ -import options +import options, strutils import jsony import user, ../types/[graphuser, graphlistmembers] from ../../types import User, VerifiedType, Result, Query, QueryKind +proc parseUserResult*(userResult: UserResult): User = + result = userResult.legacy + + if result.verifiedType == none and userResult.isBlueVerified: + result.verifiedType = blue + + if result.username.len == 0 and userResult.core.screenName.len > 0: + result.id = userResult.restId + result.username = userResult.core.screenName + result.fullname = userResult.core.name + result.userPic = userResult.avatar.imageUrl.replace("_normal", "") + + if userResult.verification.isSome: + let v = userResult.verification.get + if v.verifiedType != VerifiedType.none: + result.verifiedType = v.verifiedType + + if userResult.profileBio.isSome: + result.bio = userResult.profileBio.get.description + proc parseGraphUser*(json: string): User = if json.len == 0 or json[0] != '{': return let raw = json.fromJson(GraphUser) + let userResult = 
raw.data.userResult.result - if raw.data.userResult.result.unavailableReason.get("") == "Suspended": + if userResult.unavailableReason.get("") == "Suspended": return User(suspended: true) - result = raw.data.userResult.result.legacy - result.id = raw.data.userResult.result.restId - if result.verifiedType == VerifiedType.none and raw.data.userResult.result.isBlueVerified: - result.verifiedType = blue + result = parseUserResult(userResult) proc parseGraphListMembers*(json, cursor: string): Result[User] = result = Result[User]( @@ -31,7 +49,7 @@ proc parseGraphListMembers*(json, cursor: string): Result[User] = of TimelineTimelineItem: let userResult = entry.content.itemContent.userResults.result if userResult.restId.len > 0: - result.content.add userResult.legacy + result.content.add parseUserResult(userResult) of TimelineTimelineCursor: if entry.content.cursorType == "Bottom": result.bottom = entry.content.value diff --git a/src/experimental/parser/session.nim b/src/experimental/parser/session.nim index bb31d83..2e5a171 100644 --- a/src/experimental/parser/session.nim +++ b/src/experimental/parser/session.nim @@ -13,6 +13,7 @@ proc parseSession*(raw: string): Session = result = Session( kind: SessionKind.oauth, id: parseBiggestInt(id), + username: session.username, oauthToken: session.oauthToken, oauthSecret: session.oauthTokenSecret ) @@ -21,6 +22,7 @@ proc parseSession*(raw: string): Session = result = Session( kind: SessionKind.cookie, id: id, + username: session.username, authToken: session.authToken, ct0: session.ct0 ) diff --git a/src/experimental/parser/unifiedcard.nim b/src/experimental/parser/unifiedcard.nim index a112974..de4df18 100644 --- a/src/experimental/parser/unifiedcard.nim +++ b/src/experimental/parser/unifiedcard.nim @@ -1,6 +1,7 @@ import std/[options, tables, strutils, strformat, sugar] import jsony import user, ../types/unifiedcard +import ../../formatters from ../../types import Card, CardKind, Video from ../../utils import twimg, https @@ -77,6 +78,18 @@ proc parseMedia(component: Component; card: UnifiedCard; result: var Card) = of model3d: result.title = "Unsupported 3D model ad" +proc parseGrokShare(data: ComponentData; card: UnifiedCard; result: var Card) = + result.kind = summaryLarge + + data.destination.parseDestination(card, result) + result.dest = "Answer by Grok" + + for msg in data.conversationPreview: + if msg.sender == "USER": + result.title = msg.message.shorten(70) + elif msg.sender == "AGENT": + result.text = msg.message.shorten(500) + proc parseUnifiedCard*(json: string): Card = let card = json.fromJson(UnifiedCard) @@ -92,6 +105,8 @@ proc parseUnifiedCard*(json: string): Card = component.parseMedia(card, result) of buttonGroup: discard + of grokShare: + component.data.parseGrokShare(card, result) of ComponentType.jobDetails: component.data.parseJobDetails(card, result) of ComponentType.hidden: diff --git a/src/experimental/parser/user.nim b/src/experimental/parser/user.nim index 07e0477..498757a 100644 --- a/src/experimental/parser/user.nim +++ b/src/experimental/parser/user.nim @@ -72,21 +72,3 @@ proc parseHook*(s: string; i: var int; v: var User) = var u: RawUser parseHook(s, i, u) v = toUser u - -proc parseUser*(json: string; username=""): User = - handleErrors: - case error.code - of suspended: return User(username: username, suspended: true) - of userNotFound: return - else: echo "[error - parseUser]: ", error - - result = json.fromJson(User) - -proc parseUsers*(json: string; after=""): Result[User] = - result = Result[User](beginning: 
after.len == 0) - - # starting with '{' means it's an error - if json[0] == '[': - let raw = json.fromJson(seq[RawUser]) - for user in raw: - result.content.add user.toUser diff --git a/src/experimental/types/graphuser.nim b/src/experimental/types/graphuser.nim index 08100f9..d732b4e 100644 --- a/src/experimental/types/graphuser.nim +++ b/src/experimental/types/graphuser.nim @@ -1,5 +1,5 @@ -import options -from ../../types import User +import options, strutils +from ../../types import User, VerifiedType type GraphUser* = object @@ -8,8 +8,32 @@ type UserData* = object result*: UserResult - UserResult = object + UserCore* = object + name*: string + screenName*: string + createdAt*: string + + UserBio* = object + description*: string + + UserAvatar* = object + imageUrl*: string + + Verification* = object + verifiedType*: VerifiedType + + UserResult* = object legacy*: User restId*: string isBlueVerified*: bool unavailableReason*: Option[string] + core*: UserCore + avatar*: UserAvatar + profileBio*: Option[UserBio] + verification*: Option[Verification] + +proc enumHook*(s: string; v: var VerifiedType) = + v = try: + parseEnum[VerifiedType](s) + except: + VerifiedType.none diff --git a/src/experimental/types/session.nim b/src/experimental/types/session.nim index dd6be22..dfec428 100644 --- a/src/experimental/types/session.nim +++ b/src/experimental/types/session.nim @@ -1,8 +1,8 @@ type RawSession* = object kind*: string - username*: string id*: string + username*: string oauthToken*: string oauthTokenSecret*: string authToken*: string diff --git a/src/experimental/types/timeline.nim b/src/experimental/types/timeline.nim deleted file mode 100644 index 5ce6d9f..0000000 --- a/src/experimental/types/timeline.nim +++ /dev/null @@ -1,23 +0,0 @@ -import std/tables -from ../../types import User - -type - Search* = object - globalObjects*: GlobalObjects - timeline*: Timeline - - GlobalObjects = object - users*: Table[string, User] - - Timeline = object - instructions*: seq[Instructions] - - Instructions = object - addEntries*: tuple[entries: seq[Entry]] - - Entry = object - entryId*: string - content*: tuple[operation: Operation] - - Operation = object - cursor*: tuple[value, cursorType: string] diff --git a/src/experimental/types/unifiedcard.nim b/src/experimental/types/unifiedcard.nim index e540a64..cef6f44 100644 --- a/src/experimental/types/unifiedcard.nim +++ b/src/experimental/types/unifiedcard.nim @@ -22,6 +22,7 @@ type communityDetails mediaWithDetailsHorizontal hidden + grokShare unknown Component* = object @@ -42,6 +43,7 @@ type topicDetail*: tuple[title: Text] profileUser*: User shortDescriptionText*: string + conversationPreview*: seq[GrokConversation] MediaItem* = object id*: string @@ -76,6 +78,10 @@ type title*: Text category*: Text + GrokConversation* = object + message*: string + sender*: string + TypeField = Component | Destination | MediaEntity | AppStoreData converter fromText*(text: Text): string = string(text) @@ -96,6 +102,7 @@ proc enumHook*(s: string; v: var ComponentType) = of "community_details": communityDetails of "media_with_details_horizontal": mediaWithDetailsHorizontal of "commerce_drop_details": hidden + of "grok_share": grokShare else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown proc enumHook*(s: string; v: var AppType) = diff --git a/src/formatters.nim b/src/formatters.nim index 7428814..cafaa4f 100644 --- a/src/formatters.nim +++ b/src/formatters.nim @@ -33,10 +33,13 @@ proc getUrlPrefix*(cfg: Config): string = if cfg.useHttps: https & 
cfg.hostname else: "http://" & cfg.hostname -proc shortLink*(text: string; length=28): string = - result = text.replace(wwwRegex, "") +proc shorten*(text: string; length=28): string = + result = text if result.len > length: result = result[0 ..< length] & "…" + +proc shortLink*(text: string; length=28): string = + result = text.replace(wwwRegex, "").shorten(length) proc stripHtml*(text: string; shorten=false): string = var html = parseHtml(text) diff --git a/src/parser.nim b/src/parser.nim index aa0f8b2..614ab57 100644 --- a/src/parser.nim +++ b/src/parser.nim @@ -42,16 +42,16 @@ proc parseGraphUser(js: JsonNode): User = result = parseUser(user{"legacy"}, user{"rest_id"}.getStr) # fallback to support UserMedia/recent GraphQL updates - if result.username.len == 0 and user{"core", "screen_name"}.notNull: + if result.username.len == 0: result.username = user{"core", "screen_name"}.getStr result.fullname = user{"core", "name"}.getStr result.userPic = user{"avatar", "image_url"}.getImageStr.replace("_normal", "") if user{"is_blue_verified"}.getBool(false): result.verifiedType = blue - elif user{"verification", "verified_type"}.notNull: - let verifiedType = user{"verification", "verified_type"}.getStr("None") - result.verifiedType = parseEnum[VerifiedType](verifiedType) + + with verifiedType, user{"verification", "verified_type"}: + result.verifiedType = parseEnum[VerifiedType](verifiedType.getStr) proc parseGraphList*(js: JsonNode): List = if js.isNull: return @@ -231,7 +231,8 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet = replies: js{"reply_count"}.getInt, retweets: js{"retweet_count"}.getInt, likes: js{"favorite_count"}.getInt, - quotes: js{"quote_count"}.getInt + quotes: js{"quote_count"}.getInt, + views: js{"views_count"}.getInt ) ) @@ -339,6 +340,9 @@ proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet = result.id = js{"rest_id"}.getId result.user = parseGraphUser(js{"core"}) + with count, js{"views", "count"}: + result.stats.views = count.getStr("0").parseInt + with noteTweet, js{"note_tweet", "note_tweet_results", "result"}: result.expandNoteTweetEntities(noteTweet) @@ -368,10 +372,10 @@ proc parseGraphTweetResult*(js: JsonNode): Tweet = with tweet, js{"data", "tweet_result", "result"}: result = parseGraphTweet(tweet, false) -proc parseGraphConversation*(js: JsonNode; tweetId: string; v2=true): Conversation = +proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation = result = Conversation(replies: Result[Chain](beginning: true)) - let + v2 = js{"data", "timeline_response"}.notNull rootKey = if v2: "timeline_response" else: "threaded_conversation_with_injections_v2" contentKey = if v2: "content" else: "itemContent" resultKey = if v2: "tweetResult" else: "tweet_results" @@ -381,7 +385,8 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string; v2=true): Conversati return for i in instructions: - if i{"__typename"}.getStr == "TimelineAddEntries": + let instrType = i{"type"}.getStr(i{"__typename"}.getStr) + if instrType == "TimelineAddEntries": for e in i{"entries"}: let entryId = e{"entryId"}.getStr if entryId.startsWith("tweet"): @@ -416,21 +421,24 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string; v2=true): Conversati elif entryId.startsWith("cursor-bottom"): result.replies.bottom = e{"content", contentKey, "value"}.getStr -proc extractTweetsFromEntry*(e: JsonNode; entryId: string): seq[Tweet] = - if e{"content", "items"}.notNull: - for item in e{"content", "items"}: - with tweetResult, item{"item", "itemContent", 
"tweet_results", "result"}: - var tweet = parseGraphTweet(tweetResult, false) - if not tweet.available: - tweet.id = parseBiggestInt(item{"entryId"}.getStr.getId()) - result.add tweet - return +proc extractTweetsFromEntry*(e: JsonNode): seq[Tweet] = + var tweetResult = e{"content", "itemContent", "tweet_results", "result"} + if tweetResult.isNull: + tweetResult = e{"content", "content", "tweetResult", "result"} - with tweetResult, e{"content", "content", "tweetResult", "result"}: + if tweetResult.notNull: var tweet = parseGraphTweet(tweetResult, false) if not tweet.available: - tweet.id = parseBiggestInt(entryId.getId()) + tweet.id = parseBiggestInt(e.getEntryId()) result.add tweet + return + + for item in e{"content", "items"}: + with tweetResult, item{"item", "itemContent", "tweet_results", "result"}: + var tweet = parseGraphTweet(tweetResult, false) + if not tweet.available: + tweet.id = parseBiggestInt(item{"entryId"}.getStr.getId()) + result.add tweet proc parseGraphTimeline*(js: JsonNode; after=""): Profile = result = Profile(tweets: Timeline(beginning: after.len == 0)) @@ -461,7 +469,7 @@ proc parseGraphTimeline*(js: JsonNode; after=""): Profile = for e in i{"entries"}: let entryId = e{"entryId"}.getStr if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"): - for tweet in extractTweetsFromEntry(e, entryId): + for tweet in extractTweetsFromEntry(e): result.tweets.content.add tweet elif "-conversation-" in entryId or entryId.startsWith("homeConversation"): let (thread, self) = parseGraphThread(e) @@ -469,15 +477,14 @@ proc parseGraphTimeline*(js: JsonNode; after=""): Profile = elif entryId.startsWith("cursor-bottom"): result.tweets.bottom = e{"content", "value"}.getStr - if after.len == 0 and i{"__typename"}.getStr == "TimelinePinEntry": - with tweetResult, i{"entry", "content", "content", "tweetResult", "result"}: - let tweet = parseGraphTweet(tweetResult, false) - tweet.pinned = true - if not tweet.available and tweet.tombstone.len == 0: - let entryId = i{"entry", "entryId"}.getEntryId - if entryId.len > 0: - tweet.id = parseBiggestInt(entryId) - result.pinned = some tweet + if after.len == 0: + let instrType = i{"type"}.getStr(i{"__typename"}.getStr) + if instrType == "TimelinePinEntry": + let tweets = extractTweetsFromEntry(i{"entry"}) + if tweets.len > 0: + var tweet = tweets[0] + tweet.pinned = true + result.pinned = some tweet proc parseGraphPhotoRail*(js: JsonNode): PhotoRail = result = @[] @@ -515,7 +522,7 @@ proc parseGraphPhotoRail*(js: JsonNode): PhotoRail = for e in i{"entries"}: let entryId = e{"entryId"}.getStr if entryId.startsWith("tweet") or entryId.startsWith("profile-grid"): - for t in extractTweetsFromEntry(e, entryId): + for t in extractTweetsFromEntry(e): let photo = extractGalleryPhoto(t) if photo.url.len > 0: result.add photo diff --git a/src/sass/tweet/card.scss b/src/sass/tweet/card.scss index 680310c..5575191 100644 --- a/src/sass/tweet/card.scss +++ b/src/sass/tweet/card.scss @@ -42,6 +42,7 @@ .card-description { margin: 0.3em 0; + white-space: pre-wrap; } .card-destination { diff --git a/src/types.nim b/src/types.nim index 092d85f..5a08bb7 100644 --- a/src/types.nim +++ b/src/types.nim @@ -38,6 +38,7 @@ type Session* = ref object id*: int64 + username*: string pending*: int limited*: bool limitedAt*: int @@ -202,6 +203,7 @@ type retweets*: int likes*: int quotes*: int + views*: int Tweet* = ref object id*: int64 diff --git a/src/views/general.nim b/src/views/general.nim index 5ba40a3..0091c74 100644 --- a/src/views/general.nim +++ 
b/src/views/general.nim @@ -30,7 +30,7 @@ proc renderNavbar(cfg: Config; req: Request; rss, canonical: string): VNode = tdiv(class="nav-item right"): icon "search", title="Search", href="/search" if cfg.enableRss and rss.len > 0: - icon "rss-feed", title="RSS Feed", href=rss + icon "rss", title="RSS Feed", href=rss icon "bird", title="Open in Twitter", href=canonical a(href="https://liberapay.com/zedeus"): verbatim lp icon "info", title="About", href="/about" @@ -53,7 +53,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc=""; buildHtml(head): link(rel="stylesheet", type="text/css", href="/css/style.css?v=19") - link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2") + link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=3") if theme.len > 0: link(rel="stylesheet", type="text/css", href=(&"/css/themes/{theme}.css")) @@ -119,7 +119,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc=""; # this is last so images are also preloaded # if this is done earlier, Chrome only preloads one image for some reason link(rel="preload", type="font/woff2", `as`="font", - href="/fonts/fontello.woff2?21002321", crossorigin="anonymous") + href="/fonts/fontello.woff2?61663884", crossorigin="anonymous") proc renderMain*(body: VNode; req: Request; cfg: Config; prefs=defaultPrefs; titleText=""; desc=""; ogTitle=""; rss=""; video=""; diff --git a/src/views/tweet.nim b/src/views/tweet.nim index aaebcee..e7fab99 100644 --- a/src/views/tweet.nim +++ b/src/views/tweet.nim @@ -184,6 +184,8 @@ proc renderStats(tweet_id: int64; stats: TweetStats; views: string): VNode = span(class="tweet-stat"): icon "retweet", formatStat(stats.retweets) a(class="tweet-stat", href=("/search?q=" & encodeUrl(&"-from:quotedreplies url:{tweet_id}") & "&e-nativeretweets=on")): icon "quote", formatStat(stats.quotes) span(class="tweet-stat"): icon "heart", formatStat(stats.likes) + if stats.views > 0: + span(class="tweet-stat"): icon "views", formatStat(stats.views) if views.len > 0: span(class="tweet-stat"): icon "play", insertSep(views, ',') diff --git a/tools/get_web_session.py b/tools/create_session_browser.py similarity index 90% rename from tools/get_web_session.py rename to tools/create_session_browser.py index 502c7f4..40e3dcd 100644 --- a/tools/get_web_session.py +++ b/tools/create_session_browser.py @@ -1,23 +1,20 @@ #!/usr/bin/env python3 """ -Authenticates with X.com/Twitter and extracts session cookies for use with Nitter. -Handles 2FA, extracts user info, and outputs clean JSON for sessions.jsonl. 
- Requirements: pip install -r tools/requirements.txt Usage: - python3 tools/get_web_session.py [totp_seed] [--append sessions.jsonl] [--headless] + python3 tools/create_session_browser.py [totp_seed] [--append sessions.jsonl] [--headless] Examples: # Output to terminal - python3 tools/get_web_session.py myusername mypassword TOTP_BASE32_SECRET + python3 tools/create_session_browser.py myusername mypassword TOTP_SECRET # Append to sessions.jsonl - python3 tools/get_web_session.py myusername mypassword TOTP_SECRET --append sessions.jsonl + python3 tools/create_session_browser.py myusername mypassword TOTP_SECRET --append sessions.jsonl # Headless mode (may increase detection risk) - python3 tools/get_web_session.py myusername mypassword TOTP_SECRET --headless + python3 tools/create_session_browser.py myusername mypassword TOTP_SECRET --headless Output: {"kind": "cookie", "username": "...", "id": "...", "auth_token": "...", "ct0": "..."} diff --git a/tools/create_session_curl.py b/tools/create_session_curl.py new file mode 100644 index 0000000..f569422 --- /dev/null +++ b/tools/create_session_curl.py @@ -0,0 +1,328 @@ +#!/usr/bin/env python3 +""" +Requirements: + pip install curl_cffi pyotp + +Usage: + python3 tools/create_session_curl.py [totp_seed] [--append sessions.jsonl] + +Examples: + # Output to terminal + python3 tools/create_session_curl.py myusername mypassword TOTP_SECRET + + # Append to sessions.jsonl + python3 tools/create_session_curl.py myusername mypassword TOTP_SECRET --append sessions.jsonl + +Output: + {"kind": "cookie", "username": "...", "id": "...", "auth_token": "...", "ct0": "..."} +""" + +import sys +import json +import pyotp +from curl_cffi import requests + +BEARER_TOKEN = "AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF" +BASE_URL = "https://api.x.com/1.1/onboarding/task.json" +GUEST_ACTIVATE_URL = "https://api.x.com/1.1/guest/activate.json" + +# Subtask versions required by API +SUBTASK_VERSIONS = { + "action_list": 2, "alert_dialog": 1, "app_download_cta": 1, + "check_logged_in_account": 2, "choice_selection": 3, + "contacts_live_sync_permission_prompt": 0, "cta": 7, "email_verification": 2, + "end_flow": 1, "enter_date": 1, "enter_email": 2, "enter_password": 5, + "enter_phone": 2, "enter_recaptcha": 1, "enter_text": 5, "generic_urt": 3, + "in_app_notification": 1, "interest_picker": 3, "js_instrumentation": 1, + "menu_dialog": 1, "notifications_permission_prompt": 2, "open_account": 2, + "open_home_timeline": 1, "open_link": 1, "phone_verification": 4, + "privacy_options": 1, "security_key": 3, "select_avatar": 4, + "select_banner": 2, "settings_list": 7, "show_code": 1, "sign_up": 2, + "sign_up_review": 4, "tweet_selection_urt": 1, "update_users": 1, + "upload_media": 1, "user_recommendations_list": 4, + "user_recommendations_urt": 1, "wait_spinner": 3, "web_modal": 1 +} + + +def get_base_headers(guest_token=None): + """Build base headers for API requests.""" + headers = { + "Authorization": f"Bearer {BEARER_TOKEN}", + "Content-Type": "application/json", + "Accept": "*/*", + "Accept-Language": "en-US", + "X-Twitter-Client-Language": "en-US", + "Origin": "https://x.com", + "Referer": "https://x.com/", + } + if guest_token: + headers["X-Guest-Token"] = guest_token + return headers + + +def get_cookies_dict(session): + """Extract cookies from session.""" + return session.cookies.get_dict() if hasattr(session.cookies, 'get_dict') else dict(session.cookies) + + +def make_request(session, headers, 
flow_token, subtask_data, print_msg): + """Generic request handler for flow steps.""" + print(f"[*] {print_msg}...", file=sys.stderr) + + payload = { + "flow_token": flow_token, + "subtask_inputs": [subtask_data] if isinstance(subtask_data, dict) else subtask_data + } + + response = session.post(BASE_URL, json=payload, headers=headers) + response.raise_for_status() + + data = response.json() + new_flow_token = data.get('flow_token') + if not new_flow_token: + raise Exception(f"Failed to get flow token: {print_msg}") + + return new_flow_token, data + + +def get_guest_token(session): + """Get guest token for unauthenticated requests.""" + print("[*] Getting guest token...", file=sys.stderr) + response = session.post(GUEST_ACTIVATE_URL, headers={"Authorization": f"Bearer {BEARER_TOKEN}"}) + response.raise_for_status() + + guest_token = response.json().get('guest_token') + if not guest_token: + raise Exception("Failed to obtain guest token") + + print(f"[*] Got guest token: {guest_token}", file=sys.stderr) + return guest_token + + +def init_flow(session, guest_token): + """Initialize the login flow.""" + print("[*] Initializing login flow...", file=sys.stderr) + + headers = get_base_headers(guest_token) + payload = { + "input_flow_data": { + "flow_context": { + "debug_overrides": {}, + "start_location": {"location": "manual_link"} + }, + "subtask_versions": SUBTASK_VERSIONS + } + } + + response = session.post(f"{BASE_URL}?flow_name=login", json=payload, headers=headers) + response.raise_for_status() + + flow_token = response.json().get('flow_token') + if not flow_token: + raise Exception("Failed to get initial flow token") + + print("[*] Got initial flow token", file=sys.stderr) + return flow_token, headers + + +def submit_username(session, flow_token, headers, guest_token, username): + """Submit username.""" + headers = headers.copy() + headers["X-Guest-Token"] = guest_token + + subtask = { + "subtask_id": "LoginEnterUserIdentifierSSO", + "settings_list": { + "setting_responses": [{ + "key": "user_identifier", + "response_data": {"text_data": {"result": username}} + }], + "link": "next_link" + } + } + + flow_token, data = make_request(session, headers, flow_token, subtask, "Submitting username") + + # Check for denial (suspicious activity) + if data.get('subtasks') and 'cta' in data['subtasks'][0]: + error_msg = data['subtasks'][0]['cta'].get('primary_text', {}).get('text') + if error_msg: + raise Exception(f"Login denied: {error_msg}") + + return flow_token + + +def submit_password(session, flow_token, headers, guest_token, password): + """Submit password and detect if 2FA is needed.""" + headers = headers.copy() + headers["X-Guest-Token"] = guest_token + + subtask = { + "subtask_id": "LoginEnterPassword", + "enter_password": {"password": password, "link": "next_link"} + } + + flow_token, data = make_request(session, headers, flow_token, subtask, "Submitting password") + + needs_2fa = any(s.get('subtask_id') == 'LoginTwoFactorAuthChallenge' for s in data.get('subtasks', [])) + if needs_2fa: + print("[*] 2FA required", file=sys.stderr) + + return flow_token, needs_2fa + + +def submit_2fa(session, flow_token, headers, guest_token, totp_seed): + """Submit 2FA code.""" + if not totp_seed: + raise Exception("2FA required but no TOTP seed provided") + + code = pyotp.TOTP(totp_seed).now() + print("[*] Generating 2FA code...", file=sys.stderr) + + headers = headers.copy() + headers["X-Guest-Token"] = guest_token + + subtask = { + "subtask_id": "LoginTwoFactorAuthChallenge", + "enter_text": {"text": 
code, "link": "next_link"} + } + + flow_token, _ = make_request(session, headers, flow_token, subtask, "Submitting 2FA code") + return flow_token + + +def submit_js_instrumentation(session, flow_token, headers, guest_token): + """Submit JS instrumentation response.""" + headers = headers.copy() + headers["X-Guest-Token"] = guest_token + + subtask = { + "subtask_id": "LoginJsInstrumentationSubtask", + "js_instrumentation": { + "response": '{"rf":{"a4fc506d24bb4843c48a1966940c2796bf4fb7617a2d515ad3297b7df6b459b6":121,"bff66e16f1d7ea28c04653dc32479cf416a9c8b67c80cb8ad533b2a44fee82a3":-1,"ac4008077a7e6ca03210159dbe2134dea72a616f03832178314bb9931645e4f7":-22,"c3a8a81a9b2706c6fec42c771da65a9597c537b8e4d9b39e8e58de9fe31ff239":-12},"s":"ZHYaDA9iXRxOl2J3AZ9cc23iJx-Fg5E82KIBA_fgeZFugZGYzRtf8Bl3EUeeYgsK30gLFD2jTQx9fAMsnYCw0j8ahEy4Pb5siM5zD6n7YgOeWmFFaXoTwaGY4H0o-jQnZi5yWZRAnFi4lVuCVouNz_xd2BO2sobCO7QuyOsOxQn2CWx7bjD8vPAzT5BS1mICqUWyjZDjLnRZJU6cSQG5YFIHEPBa8Kj-v1JFgkdAfAMIdVvP7C80HWoOqYivQR7IBuOAI4xCeLQEdxlGeT-JYStlP9dcU5St7jI6ExyMeQnRicOcxXLXsan8i5Joautk2M8dAJFByzBaG4wtrPhQ3QAAAZEi-_t7"}', + "link": "next_link" + } + } + + flow_token, _ = make_request(session, headers, flow_token, subtask, "Submitting JS instrumentation") + return flow_token + + +def complete_flow(session, flow_token, headers): + """Complete the login flow.""" + cookies = get_cookies_dict(session) + + headers = headers.copy() + headers["X-Twitter-Auth-Type"] = "OAuth2Session" + if cookies.get('ct0'): + headers["X-Csrf-Token"] = cookies['ct0'] + + subtask = { + "subtask_id": "AccountDuplicationCheck", + "check_logged_in_account": {"link": "AccountDuplicationCheck_false"} + } + + make_request(session, headers, flow_token, subtask, "Completing login flow") + + +def extract_user_id(cookies_dict): + """Extract user ID from twid cookie.""" + twid = cookies_dict.get('twid', '').strip('"') + + for prefix in ['u=', 'u%3D']: + if prefix in twid: + return twid.split(prefix)[1].split('&')[0].strip('"') + + return None + + +def login_and_get_cookies(username, password, totp_seed=None): + """Authenticate with X.com and extract session cookies.""" + session = requests.Session(impersonate="chrome") + + try: + guest_token = get_guest_token(session) + flow_token, headers = init_flow(session, guest_token) + flow_token = submit_js_instrumentation(session, flow_token, headers, guest_token) + flow_token = submit_username(session, flow_token, headers, guest_token, username) + flow_token, needs_2fa = submit_password(session, flow_token, headers, guest_token, password) + + if needs_2fa: + flow_token = submit_2fa(session, flow_token, headers, guest_token, totp_seed) + + complete_flow(session, flow_token, headers) + + cookies_dict = get_cookies_dict(session) + cookies_dict['username'] = username + + user_id = extract_user_id(cookies_dict) + if user_id: + cookies_dict['id'] = user_id + + print("[*] Successfully authenticated", file=sys.stderr) + return cookies_dict + + finally: + session.close() + + +def main(): + if len(sys.argv) < 3: + print('Usage: python3 create_session_curl.py username password [totp_seed] [--append sessions.jsonl]', file=sys.stderr) + sys.exit(1) + + username = sys.argv[1] + password = sys.argv[2] + totp_seed = None + append_file = None + + # Parse optional arguments + i = 3 + while i < len(sys.argv): + arg = sys.argv[i] + if arg == '--append': + if i + 1 < len(sys.argv): + append_file = sys.argv[i + 1] + i += 2 + else: + print('[!] 
Error: --append requires a filename', file=sys.stderr) + sys.exit(1) + elif not arg.startswith('--'): + if totp_seed is None: + totp_seed = arg + i += 1 + else: + print(f'[!] Warning: Unknown argument: {arg}', file=sys.stderr) + i += 1 + + try: + cookies = login_and_get_cookies(username, password, totp_seed) + + session = { + 'kind': 'cookie', + 'username': cookies['username'], + 'id': cookies.get('id'), + 'auth_token': cookies['auth_token'], + 'ct0': cookies['ct0'] + } + + output = json.dumps(session) + + if append_file: + with open(append_file, 'a') as f: + f.write(output + '\n') + print(f'✓ Session appended to {append_file}', file=sys.stderr) + else: + print(output) + + sys.exit(0) + + except Exception as error: + print(f'[!] Error: {error}', file=sys.stderr) + import traceback + traceback.print_exc(file=sys.stderr) + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/tools/requirements.txt b/tools/requirements.txt index 4827475..2fdac24 100644 --- a/tools/requirements.txt +++ b/tools/requirements.txt @@ -1,2 +1,3 @@ nodriver>=0.48.0 pyotp +curl_cffi
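
A minimal sketch, not part of the patch: it sanity-checks sessions.jsonl lines before Nitter loads them, assuming only the cookie-session line format shown in the "Output" docstrings of tools/create_session_browser.py and tools/create_session_curl.py ({"kind": "cookie", "username": "...", "id": "...", "auth_token": "...", "ct0": "..."}). The script name, default path, and the check itself are assumptions for illustration; entries of other kinds are skipped.

#!/usr/bin/env python3
"""Sanity-check sessions.jsonl lines before feeding them to Nitter.

Illustrative only; assumes the cookie-session format emitted by
tools/create_session_browser.py and tools/create_session_curl.py.
"""
import json
import sys

# Fields present in the cookie output format; sessions with an empty
# auth_token or ct0 are rejected by getAndValidateSession in src/apiutils.nim.
REQUIRED_COOKIE_FIELDS = ("username", "id", "auth_token", "ct0")


def check_sessions(path):
    ok = True
    with open(path) as f:
        for lineno, line in enumerate(f, 1):
            line = line.strip()
            if not line:
                continue
            try:
                entry = json.loads(line)
            except json.JSONDecodeError as e:
                print(f"line {lineno}: invalid JSON ({e})", file=sys.stderr)
                ok = False
                continue
            if entry.get("kind") != "cookie":
                continue  # only cookie entries are checked here
            missing = [k for k in REQUIRED_COOKIE_FIELDS if not entry.get(k)]
            if missing:
                print(f"line {lineno}: missing or empty {missing}", file=sys.stderr)
                ok = False
    return ok


if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "sessions.jsonl"
    sys.exit(0 if check_sessions(path) else 1)

Typical use would be running it (e.g. as check_sessions.py, a hypothetical name) right after appending a new session with --append, so a malformed or partially written line is caught before the session pool tries to use it.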