JTools v3.14.0
Feature changes:

- **Migrate to the new data source** (see the sketch below)
- Migrate logging to sshared
- Update the acknowledgements list
- Upgrade the Caddy image version used for builds
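As context for the data-source migration, here is a minimal sketch of the query style the new document models expose, inferred only from the calls visible in the diff below (`find_one` with a MongoDB-style filter and a `sort` mapping, returning typed documents with attribute access). The helper function and the exact field set are illustrative assumptions, not part of this commit.

```python
# Minimal sketch of the new document-model query style, inferred from the
# diff below; latest_top100_record is a hypothetical helper, not part of
# this commit.
from typing import Optional

from models.jianshu.article_earning_ranking_record import (
    ArticleEarningRankingRecordDocument,
)


async def latest_top100_record(
    author_slug: str,
) -> Optional[ArticleEarningRankingRecordDocument]:
    # The old code queried a raw pymongo collection and read dict keys;
    # the new models take a MongoDB-style filter plus a sort mapping and
    # return a typed document (or None).
    record = await ArticleEarningRankingRecordDocument.find_one(
        {"authorSlug": author_slug, "ranking": {"$lte": 100}},
        sort={"date": "DESC"},
    )
    if record is not None:
        # Fields are attributes (record.ranking, record.date), not dict keys.
        print(record.ranking, record.date)
    return record
```

The same pattern shows up in the jpep changes further down, where aggregation pipelines go through `aggregate_one` / `aggregate_many` instead of raw collection cursors.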
FHU-yezi committed Jun 13, 2024
2 parents bd74892 + 39b06d3 commit 185f766
Showing 23 changed files with 680 additions and 589 deletions.
2 changes: 1 addition & 1 deletion Dockerfile.frontend
@@ -8,7 +8,7 @@ RUN bun install --prod --frozen-lockfile
COPY frontend .
RUN bun run build

FROM caddy:2.7-alpine
FROM caddy:2.8-alpine

WORKDIR /app

102 changes: 43 additions & 59 deletions backend/api/v1/articles.py
@@ -1,7 +1,6 @@
from datetime import datetime, timedelta
from typing import Annotated, Any, Dict, Optional, cast
from typing import Annotated, Dict, Optional

from bson import ObjectId
from jkit.article import Article
from jkit.constants import ARTICLE_SLUG_REGEX
from jkit.exceptions import ResourceUnavailableError
@@ -19,8 +18,10 @@
success,
)

from models.jianshu.article_earning_ranking_record import (
ArticleEarningRankingRecordDocument,
)
from utils.config import config
from utils.db import ARTICLE_FP_RANK_COLLECTION

# fmt: off
splitter = AbilityJiebaPossegSplitterV1(
@@ -36,88 +37,71 @@


async def get_latest_onrank_record(
author_url: str, *, minimum_ranking: Optional[int] = None
) -> Optional[Dict[str, Any]]:
cursor = (
ARTICLE_FP_RANK_COLLECTION.find(
{
"author.url": author_url,
"ranking": {
"$lte": minimum_ranking if minimum_ranking else 100,
},
}
)
.sort("date", -1)
.limit(1)
author_slug: str, *, minimum_ranking: Optional[int] = None
) -> Optional[ArticleEarningRankingRecordDocument]:
# TODO
return await ArticleEarningRankingRecordDocument.find_one(
{
"authorSlug": author_slug,
"ranking": { # type: ignore
"$lte": minimum_ranking if minimum_ranking else 100,
},
},
sort={"date": "DESC"},
)

try:
return await cursor.next()
except StopAsyncIteration:
return None


async def get_pervious_onrank_record(
onrank_record: Dict[str, Any], minimum_ranking: Optional[int] = None
) -> Optional[Dict[str, Any]]:
cursor = (
ARTICLE_FP_RANK_COLLECTION.find(
{
"_id": {"$lt": ObjectId(onrank_record["_id"])},
"author.url": onrank_record["author"]["url"],
"ranking": {
"$lte": minimum_ranking if minimum_ranking else 100,
},
}
)
.sort("_id", -1)
.limit(1)
onrank_record: ArticleEarningRankingRecordDocument,
minimum_ranking: Optional[int] = None,
) -> Optional[ArticleEarningRankingRecordDocument]:
return await ArticleEarningRankingRecordDocument.find_one(
{
"_id": {"$lt": onrank_record._id},
"authorSlug": onrank_record.author_slug,
"ranking": { # type: ignore
"$lte": minimum_ranking if minimum_ranking else 100,
},
},
sort={"_id": "DESC"},
)

try:
return await cursor.next()
except StopAsyncIteration:
return None


async def caculate_next_can_recommend_date(author_url: str) -> Optional[datetime]:
counted_article_urls = set()
async def get_earliest_can_recommend_date(author_slug: str) -> Optional[datetime]:
counted_article_slugs = set()

latest_onrank_record = await get_latest_onrank_record(
author_url, minimum_ranking=85
author_slug, minimum_ranking=85
)
if not latest_onrank_record:
# The author has no on-rank articles, or none of them reached the top 85
return None

interval_days = 10 if latest_onrank_record["ranking"] <= 30 else 7
counted_article_urls.add(latest_onrank_record["article"]["url"])
interval_days = 10 if latest_onrank_record.ranking <= 30 else 7
counted_article_slugs.add(latest_onrank_record.article.slug)

now_record = latest_onrank_record
while True:
pervious_record = await get_pervious_onrank_record(
now_record, minimum_ranking=85
)
if not pervious_record:
# No more articles
return cast(datetime, now_record["date"]) + timedelta(days=interval_days)
if pervious_record["article"]["url"] in counted_article_urls:
# The interval for this article has already been counted
return latest_onrank_record.date + timedelta(days=interval_days)
if pervious_record.article.slug in counted_article_slugs:
now_record = pervious_record
continue

counted_article_urls.add(pervious_record["article"]["url"])
counted_article_slugs.add(pervious_record.article.slug)

if (
now_record["ranking"] <= 30
and (now_record["date"] - pervious_record["date"]).days + 1 >= 10
now_record.ranking <= 30
and (now_record.date - pervious_record.date).days + 1 >= 10
) or (
now_record["ranking"] > 30
and (now_record["date"] - pervious_record["date"]).days + 1 >= 7
now_record.ranking > 30
and (now_record.date - pervious_record.date).days + 1 >= 7
):
return cast(datetime, now_record["date"]) + timedelta(days=interval_days)
return latest_onrank_record.date + timedelta(days=interval_days)

if pervious_record["ranking"] <= 30:
if pervious_record.ranking <= 30:
interval_days += 10
else:
interval_days += 7
@@ -231,10 +215,10 @@ async def get_LP_recommend_check_handler(  # noqa: N802

article_info = await article.info

author_url = article_info.author_info.to_user_obj().url
author_slug = article_info.author_info.to_user_obj().slug
article_title = article_info.title
article_fp_reward = article_info.earned_fp_amount
article_next_can_recommend_date = await caculate_next_can_recommend_date(author_url)
article_next_can_recommend_date = await get_earliest_can_recommend_date(author_slug)

can_recommend_now = article_fp_reward < 35 and (
not article_next_can_recommend_date
110 changes: 45 additions & 65 deletions backend/api/v1/jpep/ftn_macket.py
@@ -1,6 +1,6 @@
from asyncio import gather
from datetime import datetime, timedelta
from typing import Annotated, Any, Dict, Literal, Optional
from typing import Annotated, Dict, Literal, Optional

from jkit.jpep.platform_settings import PlatformSettings
from litestar import Response, Router, get
@@ -13,7 +13,7 @@
)
from sspeedup.time_helper import get_start_time

from utils.db import JPEP_FTN_MACKET_COLLECTION
from models.jpep.ftn_trade_order import FTNTradeOrderDocument

RANGE_TO_TIMEDELTA: Dict[str, timedelta] = {
"24h": timedelta(hours=24),
@@ -32,92 +32,72 @@


async def get_data_update_time() -> datetime:
result = (
await JPEP_FTN_MACKET_COLLECTION.find(
{},
{
"_id": 0,
"fetch_time": 1,
},
)
.sort("fetch_time", -1)
.limit(1)
.next()
)
return result["fetch_time"]
latest_record = await FTNTradeOrderDocument.find_one(sort={"fetchTime": "DESC"})

return latest_record.fetch_time # type: ignore


async def get_latest_order(type_: Literal["buy", "sell"]) -> Optional[Dict[str, Any]]:
async def get_latest_order(
type_: Literal["buy", "sell"],
) -> Optional[FTNTradeOrderDocument]:
time = await get_data_update_time()
try:
return (
await JPEP_FTN_MACKET_COLLECTION.find(
{
"fetch_time": time,
"trade_type": type_,
"amount.tradable": {"$ne": 0},
}
)
.sort("price", 1 if type_ == "buy" else -1)
.limit(1)
.next()
)
except StopAsyncIteration:  # No open orders on this side
return None

return await FTNTradeOrderDocument.find_one(
{"fetchTime": time, "type": type_, "amount.tradable": {"$ne": 0}},
sort={"price": "ASC" if type_ == "buy" else "DESC"},
)


async def get_current_amount(type_: Literal["buy", "sell"]) -> Optional[int]:
time = await get_data_update_time()

try:
result = await JPEP_FTN_MACKET_COLLECTION.aggregate(
[
{
"$match": {
"fetch_time": time,
"trade_type": type_,
}
},
{
"$group": {
"_id": None,
"sum": {
"$sum": "$amount.tradable",
},
result = await FTNTradeOrderDocument.aggregate_one(
[
{
"$match": {
"fetchTime": time,
"type": type_,
}
},
{
"$group": {
"_id": None,
"sum": {
"$sum": "$amount.tradable",
},
},
]
).next()
return result["sum"]
except StopIteration:  # No open orders on this side
return None
},
]
)

return result["sum"] if result else None


async def get_price_history(
type_: Literal["buy", "sell"], td: timedelta, time_unit: str
) -> Dict[datetime, float]:
result = JPEP_FTN_MACKET_COLLECTION.aggregate(
result = FTNTradeOrderDocument.aggregate_many(
[
{
"$match": {
"trade_type": type_,
"fetch_time": {
"fetchTime": {
"$gte": get_start_time(td),
},
"type": type_,
},
},
{
"$group": {
"_id": (
{
"$dateTrunc": {
"date": "$fetch_time",
"date": "$fetchTime",
"unit": time_unit,
},
}
)
if time_unit != "minute"
else "$fetch_time",
else "$fetchTime",
"price": {
"$min" if type_ == "buy" else "$max": "$price",
},
Expand All @@ -137,19 +117,19 @@ async def get_price_history(
async def get_amount_history(
type_: Literal["buy", "sell"], td: timedelta, time_unit: str
) -> Dict[datetime, float]:
result = JPEP_FTN_MACKET_COLLECTION.aggregate(
result = FTNTradeOrderDocument.aggregate_many(
[
{
"$match": {
"trade_type": type_,
"fetch_time": {
"fetchTime": {
"$gte": get_start_time(td),
},
"type": type_,
},
},
{
"$group": {
"_id": "$fetch_time",
"_id": "$fetchTime",
"amount": {
"$sum": "$amount.tradable",
},
@@ -189,12 +169,12 @@ async def get_current_amount_distribution(
) -> Dict[float, int]:
time = await get_data_update_time()

result = JPEP_FTN_MACKET_COLLECTION.aggregate(
result = FTNTradeOrderDocument.aggregate_many(
[
{
"$match": {
"fetch_time": time,
"trade_type": type_,
"fetchTime": time,
"type": type_,
},
},
{
@@ -272,8 +252,8 @@ async def get_current_price_handler() -> Response:
get_latest_order("buy"), get_latest_order("sell")
)

buy_price = buy_order["price"] if buy_order else None
sell_price = sell_order["price"] if sell_order else None
buy_price = buy_order.price if buy_order else None
sell_price = sell_order.price if sell_order else None

return success(
data=GetCurrentPriceResponse(