refactor: add standard Nim logging library

2025-11-18 21:24:17 -03:00
parent 4df434a7c6
commit 3845fb1213
9 changed files with 90 additions and 54 deletions

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
import httpclient, asyncdispatch, options, strutils, uri, times, math, tables, logging
import jsony, packedjson, zippy, oauth1
import types, auth, consts, parserutils, http_pool
import experimental/types/common
@@ -60,11 +60,11 @@ proc getAndValidateSession*(api: Api): Future[Session] {.async.} =
case result.kind
of SessionKind.oauth:
if result.oauthToken.len == 0:
echo "[sessions] Empty oauth token, session: ", result.pretty
warn "[sessions] Empty oauth token, session: ", result.pretty
raise rateLimitError()
of SessionKind.cookie:
if result.authToken.len == 0 or result.ct0.len == 0:
echo "[sessions] Empty cookie credentials, session: ", result.pretty
warn "[sessions] Empty cookie credentials, session: ", result.pretty
raise rateLimitError()
template fetchImpl(result, fetchBody) {.dirty.} =
@@ -98,7 +98,7 @@ template fetchImpl(result, fetchBody) {.dirty.} =
if result.startsWith("{\"errors"):
let errors = result.fromJson(Errors)
if errors notin errorsToSkip:
echo "Fetch error, API: ", api, ", errors: ", errors
error "Fetch error, API: ", api, ", errors: ", errors
if errors in {expiredToken, badToken, locked}:
invalidate(session)
raise rateLimitError()
@@ -107,7 +107,7 @@ template fetchImpl(result, fetchBody) {.dirty.} =
setLimited(session, api)
raise rateLimitError()
elif result.startsWith("429 Too Many Requests"):
echo "[sessions] 429 error, API: ", api, ", session: ", session.pretty
warn "[sessions] 429 error, API: ", api, ", session: ", session.pretty
session.apis[api].remaining = 0
# rate limit hit, resets after the 15 minute window
raise rateLimitError()
@@ -115,7 +115,7 @@ template fetchImpl(result, fetchBody) {.dirty.} =
fetchBody
if resp.status == $Http400:
echo "ERROR 400, ", api, ": ", result
error "ERROR 400, ", api, ": ", result
raise newException(InternalError, $url)
except InternalError as e:
raise e
@@ -125,7 +125,10 @@ template fetchImpl(result, fetchBody) {.dirty.} =
raise e
except Exception as e:
let s = session.pretty
echo "error: ", e.name, ", msg: ", e.msg, ", session: ", s, ", url: ", url
var safeUrl = $url
if safeUrl.len > 100:
safeUrl = safeUrl[0 .. 100] & "..."
error "error: ", e.name, ", msg: ", e.msg, ", session: ", s, ", url: ", safeUrl
raise rateLimitError()
finally:
release(session)
@@ -134,7 +137,7 @@ template retry(bod) =
try:
bod
except RateLimitError:
echo "[sessions] Rate limited, retrying ", api, " request..."
info "[sessions] Rate limited, retrying ", api, " request..."
bod
proc fetch*(url: Uri | SessionAwareUrl; api: Api): Future[JsonNode] {.async.} =
@@ -152,13 +155,13 @@ proc fetch*(url: Uri | SessionAwareUrl; api: Api): Future[JsonNode] {.async.} =
if body.startsWith('{') or body.startsWith('['):
result = parseJson(body)
else:
echo resp.status, ": ", body, " --- url: ", url
warn resp.status, ": ", body, " --- url: ", url
result = newJNull()
let error = result.getError
if error != null and error notin errorsToSkip:
echo "Fetch error, API: ", api, ", error: ", error
if error in {expiredToken, badToken, locked}:
let apiErr = result.getError
if apiErr != null and apiErr notin errorsToSkip:
error "Fetch error, API: ", api, ", error: ", apiErr
if apiErr in {expiredToken, badToken, locked}:
invalidate(session)
raise rateLimitError()
@@ -173,5 +176,5 @@ proc fetchRaw*(url: Uri | SessionAwareUrl; api: Api): Future[string] {.async.} =
fetchImpl result:
if not (result.startsWith('{') or result.startsWith('[')):
echo resp.status, ": ", result, " --- url: ", url
warn resp.status, ": ", result, " --- url: ", url
result.setLen(0)
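
The api.nim changes above swap echo for std/logging's warn, error, and info, which accept the same varargs[string, `$`] arguments, and additionally truncate the logged URL. A minimal sketch of that pattern, with a placeholder handler setup and made-up api/url values (not from the repo):

import std/[logging, strutils]

addHandler(newConsoleLogger(fmtStr = "[$time] $levelname: "))
setLogFilter(lvlInfo)

let api = "userTweets"                      # hypothetical API name
var safeUrl = "https://example.com/?q=" & repeat('x', 200)
if safeUrl.len > 100:
  safeUrl = safeUrl[0 .. 100] & "..."       # same truncation as in fetchImpl

# warn/error/info $-convert and concatenate their arguments, so the old
# echo-style call sites translate one-to-one
warn "[sessions] 429 error, API: ", api
error "Fetch error, API: ", api, ", url: ", safeUrl
info "[sessions] Rate limited, retrying ", api, " request..."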

View File

@@ -1,5 +1,5 @@
#SPDX-License-Identifier: AGPL-3.0-only
import std/[asyncdispatch, times, json, random, sequtils, strutils, tables, packedsets, os]
import std/[asyncdispatch, times, json, random, strutils, tables, packedsets, os, logging]
import types
import experimental/parser/session
@@ -12,8 +12,11 @@ var
sessionPool: seq[Session]
enableLogging = false
template log(str: varargs[string, `$`]) =
echo "[sessions] ", str.join("")
proc logSession(args: varargs[string, `$`]) =
var s = "[sessions] "
for arg in args:
s.add arg
info s
proc pretty*(session: Session): string =
if session.isNil:
@@ -129,7 +132,7 @@ proc isLimited(session: Session; api: Api): bool =
if session.limited and api != Api.userTweets:
if (epochTime().int - session.limitedAt) > hourInSeconds:
session.limited = false
log "resetting limit: ", session.pretty
logSession "resetting limit: ", session.pretty
return false
else:
return true
@@ -145,7 +148,7 @@ proc isReady(session: Session; api: Api): bool =
proc invalidate*(session: var Session) =
if session.isNil: return
log "invalidating: ", session.pretty
logSession "invalidating: ", session.pretty
# TODO: This isn't sufficient, but it works for now
let idx = sessionPool.find(session)
@@ -164,13 +167,13 @@ proc getSession*(api: Api): Future[Session] {.async.} =
if not result.isNil and result.isReady(api):
inc result.pending
else:
log "no sessions available for API: ", api
logSession "no sessions available for API: ", api
raise noSessionsError()
proc setLimited*(session: Session; api: Api) =
session.limited = true
session.limitedAt = epochTime().int
log "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", ", session.pretty
logSession "rate limited by api: ", api, ", reqs left: ", session.apis[api].remaining, ", ", session.pretty
proc setRateLimit*(session: Session; api: Api; remaining, reset, limit: int) =
# avoid undefined behavior in race conditions
@@ -188,15 +191,15 @@ proc initSessionPool*(cfg: Config; path: string) =
enableLogging = cfg.enableDebug
if path.endsWith(".json"):
log "ERROR: .json is not supported, the file must be a valid JSONL file ending in .jsonl"
fatal ".json is not supported, the file must be a valid JSONL file ending in .jsonl"
quit 1
if not fileExists(path):
log "ERROR: ", path, " not found. This file is required to authenticate API requests."
fatal path, " not found. This file is required to authenticate API requests."
quit 1
log "parsing JSONL account sessions file: ", path
logSession "parsing JSONL account sessions file: ", path
for line in path.lines:
sessionPool.add parseSession(line)
log "successfully added ", sessionPool.len, " valid account sessions"
logSession "successfully added ", sessionPool.len, " valid account sessions"

View File

@@ -1,4 +1,6 @@
import std/[options, tables, strutils, strformat, sugar]
import std/[options, tables, strutils, strformat, sugar, logging]
import jsony
import user, ../types/unifiedcard
import ../../formatters
@@ -112,7 +114,7 @@ proc parseUnifiedCard*(json: string): Card =
of ComponentType.hidden:
result.kind = CardKind.hidden
of ComponentType.unknown:
echo "ERROR: Unknown component type: ", json
error "ERROR: Unknown component type: ", json
case component.kind
of twitterListDetails:

View File

@@ -1,4 +1,6 @@
import std/[options, tables, times]
import std/[options, tables, times, logging]
import jsony
from ../../types import VideoType, VideoVariant, User
@@ -103,21 +105,21 @@ proc enumHook*(s: string; v: var ComponentType) =
of "media_with_details_horizontal": mediaWithDetailsHorizontal
of "commerce_drop_details": hidden
of "grok_share": grokShare
else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown
else: error "ERROR: Unknown enum value (ComponentType): ", s; unknown
proc enumHook*(s: string; v: var AppType) =
v = case s
of "android_app": androidApp
of "iphone_app": iPhoneApp
of "ipad_app": iPadApp
else: echo "ERROR: Unknown enum value (AppType): ", s; androidApp
else: error "ERROR: Unknown enum value (AppType): ", s; androidApp
proc enumHook*(s: string; v: var MediaType) =
v = case s
of "video": video
of "photo": photo
of "model3d": model3d
else: echo "ERROR: Unknown enum value (MediaType): ", s; photo
else: error "ERROR: Unknown enum value (MediaType): ", s; photo
proc parseHook*(s: string; i: var int; v: var DateTime) =
var str: string
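
The enumHook changes above keep jsony's fall-back-to-default behaviour but report unknown values through std/logging instead of echo. A standalone sketch of that pattern, assuming jsony is available as in the repo; the Media wrapper type and sample JSON are made up for illustration:

import std/logging
import jsony

type
  MediaType = enum
    video, photo, model3d
  Media = object
    mediaType: MediaType

proc enumHook*(s: string; v: var MediaType) =
  case s
  of "video": v = video
  of "photo": v = photo
  of "model3d": v = model3d
  else:
    error "ERROR: Unknown enum value (MediaType): ", s
    v = photo                    # log and fall back instead of raising

addHandler(newConsoleLogger())
let m = """{"mediaType": "gif"}""".fromJson(Media)
doAssert m.mediaType == photo    # the unknown value was logged and defaulted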

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, strformat, logging
import asyncdispatch, strformat, logging, terminal, times, strutils
from net import Port
from htmlgen import a
from os import getEnv
@@ -15,6 +15,33 @@ import routes/[
const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
const issuesUrl = "https://github.com/zedeus/nitter/issues"
type ColoredLogger = ref object of Logger
method log(logger: ColoredLogger, level: Level, args: varargs[string, `$`]) =
if level < logger.levelThreshold: return
let color = case level
of lvlFatal, lvlError: fgRed
of lvlWarn: fgYellow
of lvlInfo: fgGreen
of lvlDebug: fgCyan
else: fgWhite
let levelStr = case level
of lvlFatal: "fatal"
of lvlError: "error"
of lvlWarn: "warn"
of lvlInfo: "info"
of lvlDebug: "debug"
else: "other"
let timeStr = format(now(), "HH:mm:ss")
stdout.styledWrite(fgWhite, "[", timeStr, "] ", color, levelStr, fgWhite, ": ")
for arg in args:
stdout.write(arg)
stdout.write("\n")
stdout.flushFile()
let
configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
(cfg, fullCfg) = getConfig(configPath)
@@ -23,13 +50,14 @@ let
initSessionPool(cfg, sessionsPath)
if not cfg.enableDebug:
# Silence Jester's query warning
addHandler(newConsoleLogger())
setLogFilter(lvlError)
addHandler(new(ColoredLogger))
stdout.write &"Starting Nitter at {getUrlPrefix(cfg)}\n"
stdout.flushFile
if cfg.enableDebug:
setLogFilter(lvlDebug)
else:
setLogFilter(lvlInfo)
info &"Starting Nitter at {getUrlPrefix(cfg)}"
updateDefaultPrefs(fullCfg)
setCacheTimes(cfg)
@@ -40,8 +68,7 @@ setHttpProxy(cfg.proxy, cfg.proxyAuth)
initAboutPage(cfg.staticDir)
waitFor initRedisPool(cfg)
stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
stdout.flushFile
info &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}"
createUnsupportedRouter(cfg)
createResolverRouter(cfg)
@@ -83,13 +110,13 @@ routes:
resp Http404, showError("Page not found", cfg)
error InternalError:
echo error.exc.name, ": ", error.exc.msg
error error.exc.name, ": ", error.exc.msg
const link = a("open a GitHub issue", href = issuesUrl)
resp Http500, showError(
&"An error occurred, please {link} with the URL you tried to visit.", cfg)
error BadClientError:
echo error.exc.name, ": ", error.exc.msg
error error.exc.name, ": ", error.exc.msg
resp Http500, showError("Network error occurred, please try again.", cfg)
error RateLimitError:
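
The ColoredLogger above is a custom std/logging handler: subclass Logger, override the log method, and register an instance with addHandler; setLogFilter then controls verbosity globally. A minimal uncolored sketch of the same wiring, with an illustrative StderrLogger name and format string (not from the repo):

import std/logging

type StderrLogger = ref object of Logger

method log(logger: StderrLogger, level: Level, args: varargs[string, `$`]) =
  # honour the per-handler threshold, as ColoredLogger.log does above
  if level < logger.levelThreshold: return
  stderr.writeLine substituteLog("[$time] $levelname: ", level, args)
  stderr.flushFile()

addHandler(StderrLogger(levelThreshold: lvlInfo))
setLogFilter(lvlInfo)            # global filter, applied before any handler
info "Starting Nitter at ", "http://localhost:8080"
debug "hidden unless setLogFilter(lvlDebug) is used"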

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import std/[times, macros, htmlgen, options, algorithm, re]
import std/[times, macros, htmlgen, options, algorithm, re, logging]
import std/strutils except escape
import std/unicode except strip
from xmltree import escape
@@ -84,7 +84,7 @@ proc getEntryId*(js: JsonNode): string {.inline.} =
elif "tombstone" in entry:
return js{"content", "item", "content", "tombstone", "tweet", "id"}.getStr
else:
echo "unknown entry: ", entry
warn "unknown entry: ", entry
return
template getStrVal*(js: JsonNode; default=""): string =

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, times, strformat, strutils, tables, hashes
import asyncdispatch, times, strformat, strutils, tables, hashes, logging
import redis, redpool, flatty, supersnappy
import types, api
@@ -59,8 +59,7 @@ proc initRedisPool*(cfg: Config) {.async.} =
await r.configSet("hash-max-ziplist-entries", "1000")
except OSError:
stdout.write "Failed to connect to Redis.\n"
stdout.flushFile
fatal "Failed to connect to Redis."
quit(1)
template uidKey(name: string): string = "pid:" & $(hash(name) div 1_000_000)
@@ -112,7 +111,7 @@ template deserialize(data, T) =
try:
result = fromFlatty(uncompress(data), T)
except:
echo "Decompression failed($#): '$#'" % [astToStr(T), data]
error "Decompression failed($#): '$#'" % [astToStr(T), data]
proc getUserId*(username: string): Future[string] {.async.} =
let name = toLower(username)
@@ -189,6 +188,6 @@ proc getCachedRss*(key: string): Future[Rss] {.async.} =
let feed = await r.hGet(k, "rss")
if feed.len > 0 and feed != redisNil:
try: result.feed = uncompress feed
except: echo "Decompressing RSS failed: ", feed
except: error "Decompressing RSS failed: ", feed
else:
result.cursor.setLen 0
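
Worth noting for the Redis changes: std/logging's fatal only emits a log line, so the explicit quit(1) after it is still what stops the process. A tiny sketch of that failure path; the connection check itself is faked here:

import std/logging

addHandler(newConsoleLogger())

proc initRedisOrDie(host: string; port: int) =
  let connected = false          # stand-in for the real connection attempt
  if not connected:
    fatal "Failed to connect to Redis."
    quit(1)                      # fatal() does not exit by itself

initRedisOrDie("localhost", 6379)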

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import uri, strutils, httpclient, os, hashes, base64, re
import uri, strutils, httpclient, os, hashes, base64, re, logging
import asynchttpserver, asyncstreams, asyncfile, asyncnet
import jester
@@ -38,7 +38,7 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
let res = await client.get(url)
if res.status != "200 OK":
if res.status != "404 Not Found":
echo "[media] Proxying failed, status: $1, url: $2" % [res.status, url]
warn "[media] Proxying failed, status: $1, url: $2" % [res.status, url]
return Http404
let hashed = $hash(url)
@@ -67,7 +67,7 @@ proc proxyMedia*(req: jester.Request; url: string): Future[HttpCode] {.async.} =
await request.client.send(data)
data.setLen 0
except HttpRequestError, ProtocolError, OSError:
echo "[media] Proxying exception, error: $1, url: $2" % [getCurrentExceptionMsg(), url]
error "[media] Proxying exception, error: $1, url: $2" % [getCurrentExceptionMsg(), url]
result = Http404
finally:
client.close()
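
Unlike the varargs call sites elsewhere, the media-proxy messages above are pre-formatted with strutils' % operator before being passed to warn/error as a single string. A hedged sketch of that style; the status and url values are placeholders:

import std/[logging, strutils]

addHandler(newConsoleLogger())

let
  status = "503 Service Unavailable"    # placeholder response status
  url = "https://pbs.twimg.com/media/example.jpg"

# "%" substitutes $1, $2, ... positionally, so the whole message reaches the
# logger as one argument
warn "[media] Proxying failed, status: $1, url: $2" % [status, url]
error "[media] Proxying exception, error: $1, url: $2" % ["timeout", url]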

View File

@@ -1,5 +1,5 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, strutils, sequtils, uri, options, sugar
import asyncdispatch, strutils, sequtils, uri, options, sugar, logging
import jester, karax/vdom
@@ -32,7 +32,7 @@ proc createStatusRouter*(cfg: Config) =
let conv = await getTweet(id, getCursor())
if conv == nil:
echo "nil conv"
warn "nil conv"
if conv == nil or conv.tweet == nil or conv.tweet.id == 0:
var error = "Tweet not found"