Skip to content

Commit

Permalink
Extended ResultItem by timestamp
Browse files Browse the repository at this point in the history
  • Loading branch information
mg98 committed Oct 10, 2024
1 parent 55b1e30 commit 549c682
Show file tree
Hide file tree
Showing 6 changed files with 16 additions and 8 deletions.
2 changes: 1 addition & 1 deletion src/tribler/core/content_discovery/payload.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ class TorrentInfoFormat(VariablePayload):
"""

format_list = ["20s", "I", "I", "Q"]
names = ["infohash", "seeders", "leechers", "timestamp"]
names = ["infohash", "seeders", "leechers", "timestamp"]
length = 36

infohash: bytes
Expand Down
16 changes: 10 additions & 6 deletions src/tribler/core/recommender/community.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,7 +68,7 @@ def create_crawl_query_info(query_id: int) -> dict:
}


def create_crawl_query_info_response(query_id: int, results: int, chosen_index: int, query: str) -> dict:
def create_crawl_query_info_response(query_id: int, timestamp: int, results: int, chosen_index: int, query: str) -> dict:
"""
A response with the number of available results for the query with the id ``query_id``.
"""
Expand All @@ -78,6 +78,7 @@ def create_crawl_query_info_response(query_id: int, results: int, chosen_index:
"query_id": query_id,
"results": results,
"chosen_index": chosen_index,
"timestamp": timestamp,
"query": query
}

Expand Down Expand Up @@ -176,7 +177,8 @@ def process_query_info(self, peer: Peer, request: dict) -> None:
query_id=query.rowid,
results=len(unpacked["results"]),
chosen_index=unpacked["chosen_index"],
query=unpacked["query"]
timestamp=unpacked["timestamp"],
query=unpacked["query"],
)), b""))

@lazy_wrapper(Crawl)
Expand Down Expand Up @@ -220,6 +222,7 @@ def __init__(self, request_cache: RequestCache, peer: Peer, response: dict) -> N
self.total_results = response["results"]
self.results: list[ResultItem | None] = [None] * self.total_results
self.chosen_index = response["chosen_index"]
self.timestamp = response["timestamp"]
self.query = response["query"]

def get_next_range(self) -> tuple[int, int] | None:
Expand Down Expand Up @@ -300,14 +303,15 @@ def init_crawl_history(self) -> None:
self.crawl_history[peer_mid] = (max_id, missing)

def finalize_query(self, peer: Peer, query_id: int, query: str, chosen_index: int,
results: list[ResultItem]) -> None:
timestamp: int, results: list[ResultItem]) -> None:
"""
Update self.crawl_history and write the results to a file.
"""
query_dir = os.path.join(self.crawl_directory, hexlify(peer.mid).decode())
os.makedirs(query_dir, exist_ok=True)
json_dict = {
"query": query,
"timestamp": timestamp,
"chosen_index": chosen_index,
"results": results
}
Expand Down Expand Up @@ -356,7 +360,7 @@ def process_query_info_response(self, peer: Peer, response: dict) -> None:

if next_range is None:
self.logger.info("Query %d is empty for %s.", response["query_id"], str(peer))
self.finalize_query(peer, cache.query_id, cache.query, cache.chosen_index, [])
self.finalize_query(peer, cache.query_id, cache.query, cache.chosen_index, cache.timestamp, [])
else:
self.request_cache.add(cache)
self.ez_send(peer, Crawl(peer.mid, self.json_pack(create_crawl_fragment(
Expand All @@ -375,7 +379,7 @@ def process_query_fragment_response(self, peer: Peer, response: dict) -> None:

if next_range is None:
self.logger.info("Query %d has completed for %s.", response["query_id"], str(peer))
self.finalize_query(peer, cache.query_id, cache.query, cache.chosen_index,
self.finalize_query(peer, cache.query_id, cache.query, cache.chosen_index, cache.timestamp,
cast(list[ResultItem] , cache.results))
else:
self.request_cache.add(cache) # Reset the two-minute timer
Expand All @@ -399,4 +403,4 @@ def on_crawl_response(self, peer: Peer, payload: CrawlResponse) -> None:
elif request_type == "table_size":
self.process_table_size_response(peer, response)
else:
self.logger.warning("Crawlee sent unknown response type!")
self.logger.warning("Crawler sent unknown response type!")
2 changes: 1 addition & 1 deletion src/tribler/core/recommender/manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,4 +54,4 @@ def add_query(self, json_data: str) -> None:
Inject data into our database.
"""
with db_session:
self.Query(version=0, json=json_data)
self.Query(version=1, json=json_data)
1 change: 1 addition & 0 deletions src/tribler/core/recommender/orm_query.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ class Query(Entity, metaclass=IterQuery):
"""
{
chosen_index: int,
timestamp: int,
query: str,
results: [{infohash: str, seeders: int, leechers: int}]
}
Expand Down
2 changes: 2 additions & 0 deletions src/tribler/core/recommender/restapi/endpoint.py
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ def __init__(self, middlewares: tuple = (), client_max_size: int = MAX_REQUEST_S
@json_schema(schema(ClickedRequest={
"query": (String, "The query that led to the list of results"),
"chosen_index": (String, "The winning result index in the results list"),
"timestamp": (Integer, "The timestamp of the query"),
"results": (List(Nested(schema(ClickedResult={"infohash": (String, "A displayed infohash"),
"seeders": (Integer, "Its displayed number of seeders"),
                                                              "leechers": (Integer, "Its displayed number of leechers")}))),
Expand All @@ -63,6 +64,7 @@ async def put_clicked(self, request: RequestType) -> RESTResponse:
{
query: str,
chosen_index: int,
timestamp: int,
results: list[{
infohash: str,
seeders: int,
Expand Down
1 change: 1 addition & 0 deletions src/tribler/ui/src/services/tribler.service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -382,6 +382,7 @@ export class TriblerService {
return (await this.http.put(`/recommender/clicked`, {
query: query,
chosen_index: results.findIndex((e) => e.infohash == clicked.infohash),
timestamp: Date.now(),
results: results.map((x) => { return {
infohash: x.infohash,
seeders: x.num_seeders,
Expand Down

0 comments on commit 549c682

Please sign in to comment.