WIP new playlist strategy that pulls new songs from beatsaver.

Brian Lee 2024-10-11 17:40:45 -07:00
parent 106c97ae41
commit eb3e3f3054
16 changed files with 463 additions and 1558 deletions

.env (2 lines changed)

@ -1 +1 @@
LOG_LEVEL=DEBUG
#LOG_LEVEL=DEBUG
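The new BeatSaver helper added later in this commit looks this variable up with an INFO fallback, so commenting it out silences the DEBUG output. A small sketch of that lookup (it assumes `.env` is loaded into the process environment, e.g. by python-dotenv, which the project already depends on):

```python
from os import environ

# Same lookup as in src/helpers/BeatSaverAPI.py: DEBUG only when LOG_LEVEL is set.
LOG_LEVEL = environ.get("LOG_LEVEL", "INFO")
print(LOG_LEVEL)
```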

.gitignore (vendored, 4 lines changed)

@ -1,4 +1,5 @@
.venv
.cache
.DS_Store/
__pycache__/
*.pyc
@ -8,4 +9,5 @@ dist/
archive/
*.bplist
covers/
comfyui-output/
comfyui-output/
temp_covers

docs/ClientUsage.md (new file, 27 lines)

@ -0,0 +1,27 @@
# openapi-python-client usage
Here's how to use the generated client for the BeatSaver API.
```python
import logging

from clients.beatsaver.client import Client as beatsaver_client
from clients.beatsaver.api.maps import get_maps_latest

logging.basicConfig(
    format='%(asctime)s %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=logging.DEBUG
)

BASE_URL = "https://api.beatsaver.com"
client = beatsaver_client(base_url=BASE_URL)

# maps is a SearchResponse; maps.docs holds a page of MapDetail objects.
maps = get_maps_latest.sync(client=client)
```
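The same endpoint accepts the query parameters exposed by the generated module. A sketch with explicit arguments (the keyword names follow the calls made elsewhere in this commit, e.g. in `src/helpers/BeatSaverAPI.py`):

```python
from datetime import datetime, timezone

from clients.beatsaver.client import Client as beatsaver_client
from clients.beatsaver.api.maps import get_maps_latest
from clients.beatsaver.models import GetMapsLatestSort

client = beatsaver_client(base_url="https://api.beatsaver.com")

# Query one page of maps, mirroring the arguments used in src/helpers/BeatSaverAPI.py.
page = get_maps_latest.sync(
    client=client,
    sort=GetMapsLatestSort.UPDATED,
    page_size=20,
    verified=True,
    automapper=False,
    before=datetime(2024, 10, 1, tzinfo=timezone.utc),
)
for detail in (page.docs or []):
    print(detail.id, detail.name)
```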


@ -24,3 +24,11 @@ scores_data = beatleader_api.get_player_scores(
)
print(f"Got {len(scores_data.get('playerScores'))} scores for player {player_id}")
```
## BeatSaverAPI
```python
from helpers.BeatSaverAPI import BeatSaverAPI
beatsaver_api = BeatSaverAPI()
map_data = beatsaver_api.get_maps(year=2024, month=9)
```
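Each item returned by `get_maps()` is a generated `MapDetail`. A sketch of reading the common fields (the field names follow the sample response captured in `src/helpers/BeatSaverAPI.py`):

```python
from helpers.BeatSaverAPI import BeatSaverAPI

beatsaver_api = BeatSaverAPI()
for map_detail in beatsaver_api.get_maps(year=2024, month=9):
    if not map_detail.versions:
        continue  # skip maps without a published version
    print(
        f"{map_detail.id} {map_detail.name!r} by {map_detail.metadata.song_author_name} "
        f"(mapper: {map_detail.uploader.name}, hash: {map_detail.versions[0].hash_})"
    )
```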


@ -73,3 +73,9 @@ with open('archive/beatsaver.com.swagger_openapi3_fixed2.json', 'w') as f:
```sh
nix-shell -p ruff --run "openapi-python-client generate --path archive/beatsaver.com.swagger_openapi3_fixed2.json"
```
## Resources
* Beat Leader [swagger](https://api.beatleader.xyz/swagger/index.html), [GitHub](https://github.com/BeatLeader)
* Score Saber [swagger](https://docs.scoresaber.com/), [GitHub](https://github.com/ScoreSaber) (backend remains closed-source)
* Beat Saver [swagger](https://api.beatsaver.com/docs/), [GitHub](https://github.com/beatmaps-io/beatsaver-main)


@ -1,543 +0,0 @@
# Python coding
We used openapi-python-client to generate client libraries for the beatleader.xyz api. It's in clients/beatleader in our python project:
```
.
├── docs/
│ ├── prompts/
│ └── *.md
├── src/
│ ├── clients/
│ │ ├── beatleader/
│ │ │ ├── api/ (various API endpoints)
│ │ │ ├── models/ (data models)
│ │ │ └── client.py, errors.py, __init__.py, types.py
│ │ ├── beatsaver/ (similar structure to beatleader)
│ │ └── scoresaber/ (similar structure to beatleader)
│ ├── helpers/
│ │ └── *.py
│ └── saberlist/
│ └── *.py
├── tests/
│ ├── assets/
│ └── playlist_builder.py
├── pyproject.toml
└── README.md
```
Here's the clients/beatleader dir:
```
treegit src/clients/beatleader/
src/clients/beatleader/
├── api
│   ├── beast_saber
│   │   ├── beast_saber_get_all.py
│   │   ├── beast_saber_nominate.py
│   │   └── __init__.py
│   ├── clan
│   │   ├── clan_get_all.py
│   │   ├── clan_get_clan_by_id.py
│   │   ├── clan_get_clan.py
│   │   ├── clan_get_clan_with_maps_by_id.py
│   │   ├── clan_get_clan_with_maps.py
│   │   ├── clan_get_history.py
│   │   ├── clan_global_map.py
│   │   └── __init__.py
│   ├── leaderboard
│   │   ├── __init__.py
│   │   ├── leaderboard_get_all.py
│   │   ├── leaderboard_get_clan_rankings.py
│   │   ├── leaderboard_get.py
│   │   └── leaderboard_get_scoregraph.py
│   ├── modifiers
│   │   ├── __init__.py
│   │   └── modifiers_get_modifiers.py
│   ├── patreon
│   │   ├── __init__.py
│   │   └── patreon_refresh_my_patreon.py
│   ├── player
│   │   ├── __init__.py
│   │   ├── player_get_beat_saver.py
│   │   ├── player_get_discord.py
│   │   ├── player_get_followers_info.py
│   │   ├── player_get_followers.py
│   │   ├── player_get_founded_clan.py
│   │   ├── player_get_participating_events.py
│   │   ├── player_get_patreon.py
│   │   ├── player_get_players.py
│   │   ├── player_get.py
│   │   └── player_get_ranked_maps.py
│   ├── player_scores
│   │   ├── __init__.py
│   │   ├── player_scores_acc_graph.py
│   │   ├── player_scores_get_compact_history.py
│   │   ├── player_scores_get_compact_scores.py
│   │   ├── player_scores_get_history.py
│   │   ├── player_scores_get_pinned_scores.py
│   │   ├── player_scores_get_scores.py
│   │   └── player_scores_get_score_value.py
│   ├── song
│   │   ├── __init__.py
│   │   └── song_get_all.py
│   └── __init__.py
├── models
│   ├── __pycache__
│   ├── achievement_description.py
│   ├── achievement_level.py
│   ├── achievement.py
│   ├── badge.py
│   ├── ban.py
│   ├── beasties_nomination.py
│   ├── besties_nomination_response.py
│   ├── clan_bigger_response.py
│   ├── clan_global_map_point.py
│   ├── clan_global_map.py
│   ├── clan_map_connection.py
│   ├── clan_maps_sort_by.py
│   ├── clan_point.py
│   ├── clan.py
│   ├── clan_ranking_response_clan_response_full_response_with_metadata_and_container.py
│   ├── clan_ranking_response.py
│   ├── clan_response_full.py
│   ├── clan_response_full_response_with_metadata.py
│   ├── clan_response.py
│   ├── clan_sort_by.py
│   ├── compact_leaderboard.py
│   ├── compact_leaderboard_response.py
│   ├── compact_score.py
│   ├── compact_score_response.py
│   ├── compact_score_response_response_with_metadata.py
│   ├── compact_song_response.py
│   ├── controller_enum.py
│   ├── criteria_commentary.py
│   ├── difficulty_description.py
│   ├── difficulty_response.py
│   ├── difficulty_status.py
│   ├── event_player.py
│   ├── event_ranking.py
│   ├── external_status.py
│   ├── featured_playlist.py
│   ├── featured_playlist_response.py
│   ├── follower_type.py
│   ├── global_map_history.py
│   ├── history_compact_response.py
│   ├── hmd.py
│   ├── info_to_highlight.py
│   ├── __init__.py
│   ├── leaderboard_change.py
│   ├── leaderboard_clan_ranking_response.py
│   ├── leaderboard_contexts.py
│   ├── leaderboard_group_entry.py
│   ├── leaderboard_info_response.py
│   ├── leaderboard_info_response_response_with_metadata.py
│   ├── leaderboard.py
│   ├── leaderboard_response.py
│   ├── leaderboard_sort_by.py
│   ├── legacy_modifiers.py
│   ├── link_response.py
│   ├── map_diff_response.py
│   ├── map_info_response.py
│   ├── map_info_response_response_with_metadata.py
│   ├── mapper.py
│   ├── mapper_response.py
│   ├── map_quality.py
│   ├── map_sort_by.py
│   ├── maps_type.py
│   ├── metadata.py
│   ├── modifiers_map.py
│   ├── modifiers_rating.py
│   ├── my_type.py
│   ├── operation.py
│   ├── order.py
│   ├── participating_event_response.py
│   ├── patreon_features.py
│   ├── player_change.py
│   ├── player_context_extension.py
│   ├── player_follower.py
│   ├── player_followers_info_response.py
│   ├── player.py
│   ├── player_response_clan_response_full_response_with_metadata_and_container.py
│   ├── player_response_full.py
│   ├── player_response.py
│   ├── player_response_with_stats.py
│   ├── player_response_with_stats_response_with_metadata.py
│   ├── player_score_stats_history.py
│   ├── player_score_stats.py
│   ├── player_search.py
│   ├── player_social.py
│   ├── player_sort_by.py
│   ├── pp_type.py
│   ├── profile_settings.py
│   ├── qualification_change.py
│   ├── qualification_commentary.py
│   ├── qualification_vote.py
│   ├── ranked_mapper_response.py
│   ├── ranked_map.py
│   ├── rank_qualification.py
│   ├── rank_update_change.py
│   ├── rank_update.py
│   ├── rank_voting.py
│   ├── replay_offsets.py
│   ├── requirements.py
│   ├── score_filter_status.py
│   ├── score_graph_entry.py
│   ├── score_improvement.py
│   ├── score_metadata.py
│   ├── score_response.py
│   ├── score_response_with_acc.py
│   ├── score_response_with_my_score.py
│   ├── score_response_with_my_score_response_with_metadata.py
│   ├── scores_sort_by.py
│   ├── song.py
│   ├── song_response.py
│   ├── song_status.py
│   ├── type.py
│   └── voter_feedback.py
├── __pycache__
├── client.py
├── errors.py
├── __init__.py
├── py.typed
└── types.py
13 directories, 158 files
```
Here's the contents of `src/clients/beatleader/client.py`:
```python
import ssl
from typing import Any, Dict, Optional, Union
import httpx
from attrs import define, evolve, field
@define
class Client:
"""A class for keeping track of data related to the API
The following are accepted as keyword arguments and will be used to construct httpx Clients internally:
``base_url``: The base URL for the API, all requests are made to a relative path to this URL
``cookies``: A dictionary of cookies to be sent with every request
``headers``: A dictionary of headers to be sent with every request
``timeout``: The maximum amount of a time a request can take. API functions will raise
httpx.TimeoutException if this is exceeded.
``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production,
but can be set to False for testing purposes.
``follow_redirects``: Whether or not to follow redirects. Default value is False.
``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor.
Attributes:
raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a
status code that was not documented in the source OpenAPI document. Can also be provided as a keyword
argument to the constructor.
"""
raise_on_unexpected_status: bool = field(default=False, kw_only=True)
_base_url: str = field(alias="base_url")
_cookies: Dict[str, str] = field(factory=dict, kw_only=True, alias="cookies")
_headers: Dict[str, str] = field(factory=dict, kw_only=True, alias="headers")
_timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout")
_verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True, alias="verify_ssl")
_follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects")
_httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args")
_client: Optional[httpx.Client] = field(default=None, init=False)
_async_client: Optional[httpx.AsyncClient] = field(default=None, init=False)
def with_headers(self, headers: Dict[str, str]) -> "Client":
"""Get a new client matching this one with additional headers"""
if self._client is not None:
self._client.headers.update(headers)
if self._async_client is not None:
self._async_client.headers.update(headers)
return evolve(self, headers={**self._headers, **headers})
def with_cookies(self, cookies: Dict[str, str]) -> "Client":
"""Get a new client matching this one with additional cookies"""
if self._client is not None:
self._client.cookies.update(cookies)
if self._async_client is not None:
self._async_client.cookies.update(cookies)
return evolve(self, cookies={**self._cookies, **cookies})
def with_timeout(self, timeout: httpx.Timeout) -> "Client":
"""Get a new client matching this one with a new timeout (in seconds)"""
if self._client is not None:
self._client.timeout = timeout
if self._async_client is not None:
self._async_client.timeout = timeout
return evolve(self, timeout=timeout)
def set_httpx_client(self, client: httpx.Client) -> "Client":
"""Manually the underlying httpx.Client
**NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
"""
self._client = client
return self
def get_httpx_client(self) -> httpx.Client:
"""Get the underlying httpx.Client, constructing a new one if not previously set"""
if self._client is None:
self._client = httpx.Client(
base_url=self._base_url,
cookies=self._cookies,
headers=self._headers,
timeout=self._timeout,
verify=self._verify_ssl,
follow_redirects=self._follow_redirects,
**self._httpx_args,
)
return self._client
def __enter__(self) -> "Client":
"""Enter a context manager for self.client—you cannot enter twice (see httpx docs)"""
self.get_httpx_client().__enter__()
return self
def __exit__(self, *args: Any, **kwargs: Any) -> None:
"""Exit a context manager for internal httpx.Client (see httpx docs)"""
self.get_httpx_client().__exit__(*args, **kwargs)
def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "Client":
"""Manually the underlying httpx.AsyncClient
**NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
"""
self._async_client = async_client
return self
def get_async_httpx_client(self) -> httpx.AsyncClient:
"""Get the underlying httpx.AsyncClient, constructing a new one if not previously set"""
if self._async_client is None:
self._async_client = httpx.AsyncClient(
base_url=self._base_url,
cookies=self._cookies,
headers=self._headers,
timeout=self._timeout,
verify=self._verify_ssl,
follow_redirects=self._follow_redirects,
**self._httpx_args,
)
return self._async_client
async def __aenter__(self) -> "Client":
"""Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)"""
await self.get_async_httpx_client().__aenter__()
return self
async def __aexit__(self, *args: Any, **kwargs: Any) -> None:
"""Exit a context manager for underlying httpx.AsyncClient (see httpx docs)"""
await self.get_async_httpx_client().__aexit__(*args, **kwargs)
@define
class AuthenticatedClient:
"""A Client which has been authenticated for use on secured endpoints
The following are accepted as keyword arguments and will be used to construct httpx Clients internally:
``base_url``: The base URL for the API, all requests are made to a relative path to this URL
``cookies``: A dictionary of cookies to be sent with every request
``headers``: A dictionary of headers to be sent with every request
``timeout``: The maximum amount of a time a request can take. API functions will raise
httpx.TimeoutException if this is exceeded.
``verify_ssl``: Whether or not to verify the SSL certificate of the API server. This should be True in production,
but can be set to False for testing purposes.
``follow_redirects``: Whether or not to follow redirects. Default value is False.
``httpx_args``: A dictionary of additional arguments to be passed to the ``httpx.Client`` and ``httpx.AsyncClient`` constructor.
Attributes:
raise_on_unexpected_status: Whether or not to raise an errors.UnexpectedStatus if the API returns a
status code that was not documented in the source OpenAPI document. Can also be provided as a keyword
argument to the constructor.
token: The token to use for authentication
prefix: The prefix to use for the Authorization header
auth_header_name: The name of the Authorization header
"""
raise_on_unexpected_status: bool = field(default=False, kw_only=True)
_base_url: str = field(alias="base_url")
_cookies: Dict[str, str] = field(factory=dict, kw_only=True, alias="cookies")
_headers: Dict[str, str] = field(factory=dict, kw_only=True, alias="headers")
_timeout: Optional[httpx.Timeout] = field(default=None, kw_only=True, alias="timeout")
_verify_ssl: Union[str, bool, ssl.SSLContext] = field(default=True, kw_only=True, alias="verify_ssl")
_follow_redirects: bool = field(default=False, kw_only=True, alias="follow_redirects")
_httpx_args: Dict[str, Any] = field(factory=dict, kw_only=True, alias="httpx_args")
_client: Optional[httpx.Client] = field(default=None, init=False)
_async_client: Optional[httpx.AsyncClient] = field(default=None, init=False)
token: str
prefix: str = "Bearer"
auth_header_name: str = "Authorization"
def with_headers(self, headers: Dict[str, str]) -> "AuthenticatedClient":
"""Get a new client matching this one with additional headers"""
if self._client is not None:
self._client.headers.update(headers)
if self._async_client is not None:
self._async_client.headers.update(headers)
return evolve(self, headers={**self._headers, **headers})
def with_cookies(self, cookies: Dict[str, str]) -> "AuthenticatedClient":
"""Get a new client matching this one with additional cookies"""
if self._client is not None:
self._client.cookies.update(cookies)
if self._async_client is not None:
self._async_client.cookies.update(cookies)
return evolve(self, cookies={**self._cookies, **cookies})
def with_timeout(self, timeout: httpx.Timeout) -> "AuthenticatedClient":
"""Get a new client matching this one with a new timeout (in seconds)"""
if self._client is not None:
self._client.timeout = timeout
if self._async_client is not None:
self._async_client.timeout = timeout
return evolve(self, timeout=timeout)
def set_httpx_client(self, client: httpx.Client) -> "AuthenticatedClient":
"""Manually the underlying httpx.Client
**NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
"""
self._client = client
return self
def get_httpx_client(self) -> httpx.Client:
"""Get the underlying httpx.Client, constructing a new one if not previously set"""
if self._client is None:
self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token
self._client = httpx.Client(
base_url=self._base_url,
cookies=self._cookies,
headers=self._headers,
timeout=self._timeout,
verify=self._verify_ssl,
follow_redirects=self._follow_redirects,
**self._httpx_args,
)
return self._client
def __enter__(self) -> "AuthenticatedClient":
"""Enter a context manager for self.client—you cannot enter twice (see httpx docs)"""
self.get_httpx_client().__enter__()
return self
def __exit__(self, *args: Any, **kwargs: Any) -> None:
"""Exit a context manager for internal httpx.Client (see httpx docs)"""
self.get_httpx_client().__exit__(*args, **kwargs)
def set_async_httpx_client(self, async_client: httpx.AsyncClient) -> "AuthenticatedClient":
"""Manually the underlying httpx.AsyncClient
**NOTE**: This will override any other settings on the client, including cookies, headers, and timeout.
"""
self._async_client = async_client
return self
def get_async_httpx_client(self) -> httpx.AsyncClient:
"""Get the underlying httpx.AsyncClient, constructing a new one if not previously set"""
if self._async_client is None:
self._headers[self.auth_header_name] = f"{self.prefix} {self.token}" if self.prefix else self.token
self._async_client = httpx.AsyncClient(
base_url=self._base_url,
cookies=self._cookies,
headers=self._headers,
timeout=self._timeout,
verify=self._verify_ssl,
follow_redirects=self._follow_redirects,
**self._httpx_args,
)
return self._async_client
async def __aenter__(self) -> "AuthenticatedClient":
"""Enter a context manager for underlying httpx.AsyncClient—you cannot enter twice (see httpx docs)"""
await self.get_async_httpx_client().__aenter__()
return self
async def __aexit__(self, *args: Any, **kwargs: Any) -> None:
"""Exit a context manager for underlying httpx.AsyncClient (see httpx docs)"""
await self.get_async_httpx_client().__aexit__(*args, **kwargs)
```
Here is our attempt at using this client in ipython:
```python
import json
import os
import logging
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
from clients.beatleader import client as beatleader_client
from clients.beatleader.api.player_scores import player_scores_get_compact_scores
from clients.beatleader.models.score_response_with_my_score_response_with_metadata import ScoreResponseWithMyScoreResponseWithMetadata
logging.basicConfig(
format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.DEBUG
)
player_id = '76561199407393962'
BASE_URL = "https://api.beatleader.xyz"
client = beatleader_client.Client(base_url=BASE_URL)
response: ScoreResponseWithMyScoreResponseWithMetadata = player_scores_get_compact_scores.sync(
client=client,
id=player_id)
```
And the result:
```
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
Cell In[1], line 21
17 player_id = '76561199407393962'
19 BASE_URL = "https://api.beatleader.xyz"
---> 21 response: ScoreResponseWithMyScoreResponseWithMetadata = player_scores_get_compact_scores.sync_detailed(
22 client=beatleader_client,
23 id=player_id)
File ~/ops/beatsaber/playlist-tool/src/clients/beatleader/api/player_scores/player_scores_get_compact_scores.py:216, in sync_detailed(id, client, sort_by, order, page, count, search, diff, mode, requirements, score_status, leaderboard_context, type, modifiers, stars_from, stars_to, time_from, time_to, event_id)
162 """Retrieve player's scores in a compact form
163
164 Fetches a paginated list of scores for a specified player ID. Returns less info to save bandwith or
(...)
192 Response[Union[Any, CompactScoreResponseResponseWithMetadata]]
193 """
195 kwargs = _get_kwargs(
196 id=id,
197 sort_by=sort_by,
(...)
213 event_id=event_id,
214 )
--> 216 response = client.get_httpx_client().request(
217 **kwargs,
218 )
220 return _build_response(client=client, response=response)
AttributeError: module 'clients.beatleader.client' has no attribute 'get_httpx_client'
```


@ -1,216 +0,0 @@
# Python coding
We used openapi-python-client to generate client libraries for the beatleader.xyz api. It's in clients/beatleader in our python project:
```
.
├── docs/
│ ├── prompts/
│ └── *.md
├── src/
│ ├── clients/
│ │ ├── beatleader/
│ │ │ ├── api/ (various API endpoints)
│ │ │ ├── models/ (data models)
│ │ │ └── client.py, errors.py, __init__.py, types.py
│ │ ├── beatsaver/ (similar structure to beatleader)
│ │ └── scoresaber/ (similar structure to beatleader)
│ ├── helpers/
│ │ └── *.py
│ └── saberlist/
│ └── *.py
├── tests/
│ ├── assets/
│ └── playlist_builder.py
├── pyproject.toml
└── README.md
```
Here's the clients/beatleader dir:
```
treegit src/clients/beatleader/
src/clients/beatleader/
├── api
│   ├── beast_saber
│   │   ├── beast_saber_get_all.py
│   │   ├── beast_saber_nominate.py
│   │   └── __init__.py
│   ├── clan
│   │   ├── clan_get_all.py
│   │   ├── clan_get_clan_by_id.py
│   │   ├── clan_get_clan.py
│   │   ├── clan_get_clan_with_maps_by_id.py
│   │   ├── clan_get_clan_with_maps.py
│   │   ├── clan_get_history.py
│   │   ├── clan_global_map.py
│   │   └── __init__.py
│   ├── leaderboard
│   │   ├── __init__.py
│   │   ├── leaderboard_get_all.py
│   │   ├── leaderboard_get_clan_rankings.py
│   │   ├── leaderboard_get.py
│   │   └── leaderboard_get_scoregraph.py
│   ├── modifiers
│   │   ├── __init__.py
│   │   └── modifiers_get_modifiers.py
│   ├── patreon
│   │   ├── __init__.py
│   │   └── patreon_refresh_my_patreon.py
│   ├── player
│   │   ├── __init__.py
│   │   ├── player_get_beat_saver.py
│   │   ├── player_get_discord.py
│   │   ├── player_get_followers_info.py
│   │   ├── player_get_followers.py
│   │   ├── player_get_founded_clan.py
│   │   ├── player_get_participating_events.py
│   │   ├── player_get_patreon.py
│   │   ├── player_get_players.py
│   │   ├── player_get.py
│   │   └── player_get_ranked_maps.py
│   ├── player_scores
│   │   ├── __init__.py
│   │   ├── player_scores_acc_graph.py
│   │   ├── player_scores_get_compact_history.py
│   │   ├── player_scores_get_compact_scores.py
│   │   ├── player_scores_get_history.py
│   │   ├── player_scores_get_pinned_scores.py
│   │   ├── player_scores_get_scores.py
│   │   └── player_scores_get_score_value.py
│   ├── song
│   │   ├── __init__.py
│   │   └── song_get_all.py
│   └── __init__.py
├── models
│   ├── __pycache__
│   ├── achievement_description.py
│   ├── achievement_level.py
│   ├── achievement.py
│   ├── badge.py
│   ├── ban.py
│   ├── beasties_nomination.py
│   ├── besties_nomination_response.py
│   ├── clan_bigger_response.py
│   ├── clan_global_map_point.py
│   ├── clan_global_map.py
│   ├── clan_map_connection.py
│   ├── clan_maps_sort_by.py
│   ├── clan_point.py
│   ├── clan.py
│   ├── clan_ranking_response_clan_response_full_response_with_metadata_and_container.py
│   ├── clan_ranking_response.py
│   ├── clan_response_full.py
│   ├── clan_response_full_response_with_metadata.py
│   ├── clan_response.py
│   ├── clan_sort_by.py
│   ├── compact_leaderboard.py
│   ├── compact_leaderboard_response.py
│   ├── compact_score.py
│   ├── compact_score_response.py
│   ├── compact_score_response_response_with_metadata.py
│   ├── compact_song_response.py
│   ├── controller_enum.py
│   ├── criteria_commentary.py
│   ├── difficulty_description.py
│   ├── difficulty_response.py
│   ├── difficulty_status.py
│   ├── event_player.py
│   ├── event_ranking.py
│   ├── external_status.py
│   ├── featured_playlist.py
│   ├── featured_playlist_response.py
│   ├── follower_type.py
│   ├── global_map_history.py
│   ├── history_compact_response.py
│   ├── hmd.py
│   ├── info_to_highlight.py
│   ├── __init__.py
│   ├── leaderboard_change.py
│   ├── leaderboard_clan_ranking_response.py
│   ├── leaderboard_contexts.py
│   ├── leaderboard_group_entry.py
│   ├── leaderboard_info_response.py
│   ├── leaderboard_info_response_response_with_metadata.py
│   ├── leaderboard.py
│   ├── leaderboard_response.py
│   ├── leaderboard_sort_by.py
│   ├── legacy_modifiers.py
│   ├── link_response.py
│   ├── map_diff_response.py
│   ├── map_info_response.py
│   ├── map_info_response_response_with_metadata.py
│   ├── mapper.py
│   ├── mapper_response.py
│   ├── map_quality.py
│   ├── map_sort_by.py
│   ├── maps_type.py
│   ├── metadata.py
│   ├── modifiers_map.py
│   ├── modifiers_rating.py
│   ├── my_type.py
│   ├── operation.py
│   ├── order.py
│   ├── participating_event_response.py
│   ├── patreon_features.py
│   ├── player_change.py
│   ├── player_context_extension.py
│   ├── player_follower.py
│   ├── player_followers_info_response.py
│   ├── player.py
│   ├── player_response_clan_response_full_response_with_metadata_and_container.py
│   ├── player_response_full.py
│   ├── player_response.py
│   ├── player_response_with_stats.py
│   ├── player_response_with_stats_response_with_metadata.py
│   ├── player_score_stats_history.py
│   ├── player_score_stats.py
│   ├── player_search.py
│   ├── player_social.py
│   ├── player_sort_by.py
│   ├── pp_type.py
│   ├── profile_settings.py
│   ├── qualification_change.py
│   ├── qualification_commentary.py
│   ├── qualification_vote.py
│   ├── ranked_mapper_response.py
│   ├── ranked_map.py
│   ├── rank_qualification.py
│   ├── rank_update_change.py
│   ├── rank_update.py
│   ├── rank_voting.py
│   ├── replay_offsets.py
│   ├── requirements.py
│   ├── score_filter_status.py
│   ├── score_graph_entry.py
│   ├── score_improvement.py
│   ├── score_metadata.py
│   ├── score_response.py
│   ├── score_response_with_acc.py
│   ├── score_response_with_my_score.py
│   ├── score_response_with_my_score_response_with_metadata.py
│   ├── scores_sort_by.py
│   ├── song.py
│   ├── song_response.py
│   ├── song_status.py
│   ├── type.py
│   └── voter_feedback.py
├── __pycache__
├── client.py
├── errors.py
├── __init__.py
├── py.typed
└── types.py
13 directories, 158 files
```
Here's the contents of ``:
```python
```
Here our attempt at using this client in ipython:
```python
```

File diff suppressed because one or more lines are too long


@ -1,599 +0,0 @@
# New Python Class
We are working on this new Python class:
```python
import json
import os
import logging
from datetime import datetime, timedelta
from typing import Optional, Dict, Any
from clients.beatleader import client as beatleader_client
from clients.beatleader.api.player_scores import player_scores_get_compact_scores
from clients.beatleader.models.compact_score_response_response_with_metadata import CompactScoreResponseResponseWithMetadata
logging.basicConfig(
format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
level=logging.DEBUG
)
class BeatLeaderAPI:
BASE_URL = "https://api.beatleader.xyz"
def __init__(self, cache_expiry_days: int = 1, cache_dir: Optional[str] = None):
self.client = beatleader_client.Client(base_url=self.BASE_URL)
self.cache_expiry_days = cache_expiry_days
self.CACHE_DIR = cache_dir or self._determine_cache_dir()
if not os.path.exists(self.CACHE_DIR):
os.makedirs(self.CACHE_DIR)
logging.info(f"Created cache directory: {self.CACHE_DIR}")
def _determine_cache_dir(self) -> str:
home_cache = os.path.expanduser("~/.cache")
beatleader_cache = os.path.join(home_cache, "beatleader")
if os.path.exists(home_cache):
if not os.path.exists(beatleader_cache):
try:
os.makedirs(beatleader_cache)
logging.info(f"Created cache directory: {beatleader_cache}")
except OSError as e:
logging.warning(f"Failed to create {beatleader_cache}: {e}")
return os.path.join(os.getcwd(), ".cache")
return beatleader_cache
else:
logging.info("~/.cache doesn't exist, using local .cache directory")
return os.path.join(os.getcwd(), ".cache")
def _get_cache_filename(self, player_id: str) -> str:
return os.path.join(self.CACHE_DIR, f"player_{player_id}_scores.json")
def _is_cache_valid(self, cache_file: str) -> bool:
if not os.path.exists(cache_file):
return False
file_modified_time = datetime.fromtimestamp(os.path.getmtime(cache_file))
return datetime.now() - file_modified_time < timedelta(days=self.cache_expiry_days)
def get_player_scores(
self,
player_id: str,
use_cache: bool = True,
count: int = 100,
sort: str = "recent",
max_pages: Optional[int] = None
) -> Dict[str, Any]:
"""
Fetches all player scores for a given player ID, handling pagination and caching.
:param player_id: The ScoreSaber player ID.
:param use_cache: Whether to use cached data if available.
:param limit: Number of scores per page.
:param sort: Sorting criteria.
:param max_pages: Maximum number of pages to fetch. Fetch all if None.
:return: A dictionary containing metadata and a list of player scores.
"""
cache_file = self._get_cache_filename(player_id)
if use_cache and self._is_cache_valid(cache_file):
logging.debug(f"Using cached data for player {player_id}")
with open(cache_file, 'r') as f:
return json.load(f)
logging.debug(f"Fetching fresh data for player {player_id}")
all_scores = []
page = 1
total_items = None
while max_pages is None or page <= max_pages:
try:
response: CompactScoreResponseResponseWithMetadata = player_scores_get_compact_scores.sync(
client=self.client,
id=player_id,
page=page,
count=count,
sort=sort
)
except Exception as e:
logging.error(f"Error fetching page {page} for player {player_id}: {e}")
return {"metadata": {}, "playerScores": []}
all_scores.extend(response.data)
if total_items is None:
total_items = response.metadata.total
logging.debug(f"Total scores to fetch: {total_items}")
logging.debug(f"Fetched page {page}: {len(response.data)} scores")
if len(all_scores) >= total_items:
break
page += 1
result = {
'metadata': {
'itemsPerPage': response.metadata.items_per_page,
'page': response.metadata.page,
'total': response.metadata.total
},
'playerScores': all_scores
}
with open(cache_file, 'w') as f:
json.dump(result, f, default=str) # default=str to handle datetime serialization
logging.info(f"Cached scores for player {player_id} at {cache_file}")
return result
```
Here is `src/clients/beatleader/api/player_scores/player_scores_get_compact_scores.py`:
```python
from http import HTTPStatus
from typing import Any, Dict, Optional, Union, cast
import httpx
from ... import errors
from ...client import AuthenticatedClient, Client
from ...models.compact_score_response_response_with_metadata import CompactScoreResponseResponseWithMetadata
from ...models.difficulty_status import DifficultyStatus
from ...models.leaderboard_contexts import LeaderboardContexts
from ...models.order import Order
from ...models.requirements import Requirements
from ...models.score_filter_status import ScoreFilterStatus
from ...models.scores_sort_by import ScoresSortBy
from ...types import UNSET, Response, Unset
def _get_kwargs(
id: str,
*,
sort_by: Union[Unset, ScoresSortBy] = UNSET,
order: Union[Unset, Order] = UNSET,
page: Union[Unset, int] = 1,
count: Union[Unset, int] = 8,
search: Union[Unset, str] = UNSET,
diff: Union[Unset, str] = UNSET,
mode: Union[Unset, str] = UNSET,
requirements: Union[Unset, Requirements] = UNSET,
score_status: Union[Unset, ScoreFilterStatus] = UNSET,
leaderboard_context: Union[Unset, LeaderboardContexts] = UNSET,
type: Union[Unset, DifficultyStatus] = UNSET,
modifiers: Union[Unset, str] = UNSET,
stars_from: Union[Unset, float] = UNSET,
stars_to: Union[Unset, float] = UNSET,
time_from: Union[Unset, int] = UNSET,
time_to: Union[Unset, int] = UNSET,
event_id: Union[Unset, int] = UNSET,
) -> Dict[str, Any]:
params: Dict[str, Any] = {}
json_sort_by: Union[Unset, str] = UNSET
if not isinstance(sort_by, Unset):
json_sort_by = sort_by.value
params["sortBy"] = json_sort_by
json_order: Union[Unset, str] = UNSET
if not isinstance(order, Unset):
json_order = order.value
params["order"] = json_order
params["page"] = page
params["count"] = count
params["search"] = search
params["diff"] = diff
params["mode"] = mode
json_requirements: Union[Unset, str] = UNSET
if not isinstance(requirements, Unset):
json_requirements = requirements.value
params["requirements"] = json_requirements
json_score_status: Union[Unset, str] = UNSET
if not isinstance(score_status, Unset):
json_score_status = score_status.value
params["scoreStatus"] = json_score_status
json_leaderboard_context: Union[Unset, str] = UNSET
if not isinstance(leaderboard_context, Unset):
json_leaderboard_context = leaderboard_context.value
params["leaderboardContext"] = json_leaderboard_context
json_type: Union[Unset, str] = UNSET
if not isinstance(type, Unset):
json_type = type.value
params["type"] = json_type
params["modifiers"] = modifiers
params["stars_from"] = stars_from
params["stars_to"] = stars_to
params["time_from"] = time_from
params["time_to"] = time_to
params["eventId"] = event_id
params = {k: v for k, v in params.items() if v is not UNSET and v is not None}
_kwargs: Dict[str, Any] = {
"method": "get",
"url": f"/player/{id}/scores/compact",
"params": params,
}
return _kwargs
def _parse_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Optional[Union[Any, CompactScoreResponseResponseWithMetadata]]:
if response.status_code == HTTPStatus.OK:
response_200 = CompactScoreResponseResponseWithMetadata.from_dict(response.json())
return response_200
if response.status_code == HTTPStatus.BAD_REQUEST:
response_400 = cast(Any, None)
return response_400
if response.status_code == HTTPStatus.NOT_FOUND:
response_404 = cast(Any, None)
return response_404
if client.raise_on_unexpected_status:
raise errors.UnexpectedStatus(response.status_code, response.content)
else:
return None
def _build_response(
*, client: Union[AuthenticatedClient, Client], response: httpx.Response
) -> Response[Union[Any, CompactScoreResponseResponseWithMetadata]]:
return Response(
status_code=HTTPStatus(response.status_code),
content=response.content,
headers=response.headers,
parsed=_parse_response(client=client, response=response),
)
def sync_detailed(
id: str,
*,
client: Union[AuthenticatedClient, Client],
sort_by: Union[Unset, ScoresSortBy] = UNSET,
order: Union[Unset, Order] = UNSET,
page: Union[Unset, int] = 1,
count: Union[Unset, int] = 8,
search: Union[Unset, str] = UNSET,
diff: Union[Unset, str] = UNSET,
mode: Union[Unset, str] = UNSET,
requirements: Union[Unset, Requirements] = UNSET,
score_status: Union[Unset, ScoreFilterStatus] = UNSET,
leaderboard_context: Union[Unset, LeaderboardContexts] = UNSET,
type: Union[Unset, DifficultyStatus] = UNSET,
modifiers: Union[Unset, str] = UNSET,
stars_from: Union[Unset, float] = UNSET,
stars_to: Union[Unset, float] = UNSET,
time_from: Union[Unset, int] = UNSET,
time_to: Union[Unset, int] = UNSET,
event_id: Union[Unset, int] = UNSET,
) -> Response[Union[Any, CompactScoreResponseResponseWithMetadata]]:
"""Retrieve player's scores in a compact form
Fetches a paginated list of scores for a specified player ID. Returns less info to save bandwith or
processing time
Args:
id (str):
sort_by (Union[Unset, ScoresSortBy]):
order (Union[Unset, Order]): Represents the order in which values will be sorted.
page (Union[Unset, int]): Default: 1.
count (Union[Unset, int]): Default: 8.
search (Union[Unset, str]):
diff (Union[Unset, str]):
mode (Union[Unset, str]):
requirements (Union[Unset, Requirements]):
score_status (Union[Unset, ScoreFilterStatus]):
leaderboard_context (Union[Unset, LeaderboardContexts]):
type (Union[Unset, DifficultyStatus]): Represents the difficulty status of a map.
modifiers (Union[Unset, str]):
stars_from (Union[Unset, float]):
stars_to (Union[Unset, float]):
time_from (Union[Unset, int]):
time_to (Union[Unset, int]):
event_id (Union[Unset, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Union[Any, CompactScoreResponseResponseWithMetadata]]
"""
kwargs = _get_kwargs(
id=id,
sort_by=sort_by,
order=order,
page=page,
count=count,
search=search,
diff=diff,
mode=mode,
requirements=requirements,
score_status=score_status,
leaderboard_context=leaderboard_context,
type=type,
modifiers=modifiers,
stars_from=stars_from,
stars_to=stars_to,
time_from=time_from,
time_to=time_to,
event_id=event_id,
)
response = client.get_httpx_client().request(
**kwargs,
)
return _build_response(client=client, response=response)
def sync(
id: str,
*,
client: Union[AuthenticatedClient, Client],
sort_by: Union[Unset, ScoresSortBy] = UNSET,
order: Union[Unset, Order] = UNSET,
page: Union[Unset, int] = 1,
count: Union[Unset, int] = 8,
search: Union[Unset, str] = UNSET,
diff: Union[Unset, str] = UNSET,
mode: Union[Unset, str] = UNSET,
requirements: Union[Unset, Requirements] = UNSET,
score_status: Union[Unset, ScoreFilterStatus] = UNSET,
leaderboard_context: Union[Unset, LeaderboardContexts] = UNSET,
type: Union[Unset, DifficultyStatus] = UNSET,
modifiers: Union[Unset, str] = UNSET,
stars_from: Union[Unset, float] = UNSET,
stars_to: Union[Unset, float] = UNSET,
time_from: Union[Unset, int] = UNSET,
time_to: Union[Unset, int] = UNSET,
event_id: Union[Unset, int] = UNSET,
) -> Optional[Union[Any, CompactScoreResponseResponseWithMetadata]]:
"""Retrieve player's scores in a compact form
Fetches a paginated list of scores for a specified player ID. Returns less info to save bandwith or
processing time
Args:
id (str):
sort_by (Union[Unset, ScoresSortBy]):
order (Union[Unset, Order]): Represents the order in which values will be sorted.
page (Union[Unset, int]): Default: 1.
count (Union[Unset, int]): Default: 8.
search (Union[Unset, str]):
diff (Union[Unset, str]):
mode (Union[Unset, str]):
requirements (Union[Unset, Requirements]):
score_status (Union[Unset, ScoreFilterStatus]):
leaderboard_context (Union[Unset, LeaderboardContexts]):
type (Union[Unset, DifficultyStatus]): Represents the difficulty status of a map.
modifiers (Union[Unset, str]):
stars_from (Union[Unset, float]):
stars_to (Union[Unset, float]):
time_from (Union[Unset, int]):
time_to (Union[Unset, int]):
event_id (Union[Unset, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Union[Any, CompactScoreResponseResponseWithMetadata]
"""
return sync_detailed(
id=id,
client=client,
sort_by=sort_by,
order=order,
page=page,
count=count,
search=search,
diff=diff,
mode=mode,
requirements=requirements,
score_status=score_status,
leaderboard_context=leaderboard_context,
type=type,
modifiers=modifiers,
stars_from=stars_from,
stars_to=stars_to,
time_from=time_from,
time_to=time_to,
event_id=event_id,
).parsed
async def asyncio_detailed(
id: str,
*,
client: Union[AuthenticatedClient, Client],
sort_by: Union[Unset, ScoresSortBy] = UNSET,
order: Union[Unset, Order] = UNSET,
page: Union[Unset, int] = 1,
count: Union[Unset, int] = 8,
search: Union[Unset, str] = UNSET,
diff: Union[Unset, str] = UNSET,
mode: Union[Unset, str] = UNSET,
requirements: Union[Unset, Requirements] = UNSET,
score_status: Union[Unset, ScoreFilterStatus] = UNSET,
leaderboard_context: Union[Unset, LeaderboardContexts] = UNSET,
type: Union[Unset, DifficultyStatus] = UNSET,
modifiers: Union[Unset, str] = UNSET,
stars_from: Union[Unset, float] = UNSET,
stars_to: Union[Unset, float] = UNSET,
time_from: Union[Unset, int] = UNSET,
time_to: Union[Unset, int] = UNSET,
event_id: Union[Unset, int] = UNSET,
) -> Response[Union[Any, CompactScoreResponseResponseWithMetadata]]:
"""Retrieve player's scores in a compact form
Fetches a paginated list of scores for a specified player ID. Returns less info to save bandwith or
processing time
Args:
id (str):
sort_by (Union[Unset, ScoresSortBy]):
order (Union[Unset, Order]): Represents the order in which values will be sorted.
page (Union[Unset, int]): Default: 1.
count (Union[Unset, int]): Default: 8.
search (Union[Unset, str]):
diff (Union[Unset, str]):
mode (Union[Unset, str]):
requirements (Union[Unset, Requirements]):
score_status (Union[Unset, ScoreFilterStatus]):
leaderboard_context (Union[Unset, LeaderboardContexts]):
type (Union[Unset, DifficultyStatus]): Represents the difficulty status of a map.
modifiers (Union[Unset, str]):
stars_from (Union[Unset, float]):
stars_to (Union[Unset, float]):
time_from (Union[Unset, int]):
time_to (Union[Unset, int]):
event_id (Union[Unset, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Response[Union[Any, CompactScoreResponseResponseWithMetadata]]
"""
kwargs = _get_kwargs(
id=id,
sort_by=sort_by,
order=order,
page=page,
count=count,
search=search,
diff=diff,
mode=mode,
requirements=requirements,
score_status=score_status,
leaderboard_context=leaderboard_context,
type=type,
modifiers=modifiers,
stars_from=stars_from,
stars_to=stars_to,
time_from=time_from,
time_to=time_to,
event_id=event_id,
)
response = await client.get_async_httpx_client().request(**kwargs)
return _build_response(client=client, response=response)
async def asyncio(
id: str,
*,
client: Union[AuthenticatedClient, Client],
sort_by: Union[Unset, ScoresSortBy] = UNSET,
order: Union[Unset, Order] = UNSET,
page: Union[Unset, int] = 1,
count: Union[Unset, int] = 8,
search: Union[Unset, str] = UNSET,
diff: Union[Unset, str] = UNSET,
mode: Union[Unset, str] = UNSET,
requirements: Union[Unset, Requirements] = UNSET,
score_status: Union[Unset, ScoreFilterStatus] = UNSET,
leaderboard_context: Union[Unset, LeaderboardContexts] = UNSET,
type: Union[Unset, DifficultyStatus] = UNSET,
modifiers: Union[Unset, str] = UNSET,
stars_from: Union[Unset, float] = UNSET,
stars_to: Union[Unset, float] = UNSET,
time_from: Union[Unset, int] = UNSET,
time_to: Union[Unset, int] = UNSET,
event_id: Union[Unset, int] = UNSET,
) -> Optional[Union[Any, CompactScoreResponseResponseWithMetadata]]:
"""Retrieve player's scores in a compact form
Fetches a paginated list of scores for a specified player ID. Returns less info to save bandwith or
processing time
Args:
id (str):
sort_by (Union[Unset, ScoresSortBy]):
order (Union[Unset, Order]): Represents the order in which values will be sorted.
page (Union[Unset, int]): Default: 1.
count (Union[Unset, int]): Default: 8.
search (Union[Unset, str]):
diff (Union[Unset, str]):
mode (Union[Unset, str]):
requirements (Union[Unset, Requirements]):
score_status (Union[Unset, ScoreFilterStatus]):
leaderboard_context (Union[Unset, LeaderboardContexts]):
type (Union[Unset, DifficultyStatus]): Represents the difficulty status of a map.
modifiers (Union[Unset, str]):
stars_from (Union[Unset, float]):
stars_to (Union[Unset, float]):
time_from (Union[Unset, int]):
time_to (Union[Unset, int]):
event_id (Union[Unset, int]):
Raises:
errors.UnexpectedStatus: If the server returns an undocumented status code and Client.raise_on_unexpected_status is True.
httpx.TimeoutException: If the request takes longer than Client.timeout.
Returns:
Union[Any, CompactScoreResponseResponseWithMetadata]
"""
return (
await asyncio_detailed(
id=id,
client=client,
sort_by=sort_by,
order=order,
page=page,
count=count,
search=search,
diff=diff,
mode=mode,
requirements=requirements,
score_status=score_status,
leaderboard_context=leaderboard_context,
type=type,
modifiers=modifiers,
stars_from=stars_from,
stars_to=stars_to,
time_from=time_from,
time_to=time_to,
event_id=event_id,
)
).parsed
```
Please review get_player_scores(), we wonder if the sort option is done correctly.


@ -12,11 +12,11 @@ license = { file = "LICENSE" }
keywords = ["reddit"]
dependencies = [
'build>=1.2.1',
'requests>=2.31.0',
'pytest>=8.1.1',
'python-dotenv>=1.0.1',
'PyScoreSaber>=1.0.10',
'beatsaver>=1.0.1'
'requests>=2.31.0',
# 'PyScoreSaber>=1.0.10',
# 'beatsaver>=1.0.1'
]
requires-python = ">=3.8.10"
classifiers = [


@ -13,27 +13,27 @@ from ...types import UNSET, Response, Unset
def _get_kwargs(
*,
after: Union[Unset, datetime.datetime] = UNSET,
client: Client,
before: Union[Unset, None, datetime.datetime] = UNSET,
after: Union[Unset, None, datetime.datetime] = UNSET,
automapper: Union[Unset, bool] = UNSET,
before: Union[Unset, datetime.datetime] = UNSET,
page_size: Union[Unset, int] = 20,
sort: Union[Unset, GetMapsLatestSort] = UNSET,
verified: Union[Unset, bool] = UNSET,
) -> Dict[str, Any]:
params: Dict[str, Any] = {}
json_before: Union[Unset, None, str] = UNSET
if not isinstance(before, Unset):
json_before = before.replace(microsecond=0).isoformat() if before else None
params["before"] = json_before
json_after: Union[Unset, str] = UNSET
json_after: Union[Unset, None, str] = UNSET
if not isinstance(after, Unset):
json_after = after.isoformat()
json_after = after.replace(microsecond=0).isoformat() if after else None
params["after"] = json_after
params["automapper"] = automapper
json_before: Union[Unset, str] = UNSET
if not isinstance(before, Unset):
json_before = before.isoformat()
params["before"] = json_before
params["pageSize"] = page_size
json_sort: Union[Unset, str] = UNSET
@ -109,6 +109,7 @@ def sync_detailed(
"""
kwargs = _get_kwargs(
client=client, # Add this line
after=after,
automapper=automapper,
before=before,
@ -194,6 +195,7 @@ async def asyncio_detailed(
"""
kwargs = _get_kwargs(
client=client, # Add this line
after=after,
automapper=automapper,
before=before,


@ -1,4 +1,5 @@
from enum import Enum
import warnings
class MapDetailTagsItem(str, Enum):
@ -46,6 +47,15 @@ class MapDetailTagsItem(str, Enum):
    TECHNO = "Techno"
    TRANCE = "Trance"
    VOCALOID = "Vocaloid"
    UNKNOWN = "Unknown"  # Add this new value

    def __str__(self) -> str:
        return str(self.value)

    @classmethod
    def _missing_(cls, value):
        for member in cls:
            if member.value.lower() == value.lower():
                return member
        warnings.warn(f"Unknown tag value: {value}. Using 'Unknown' instead.", UserWarning)
        return cls.UNKNOWN
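An illustration of how the new `_missing_` hook behaves; the `clients.beatsaver.models` import path is assumed to re-export this enum the same way the other generated models are re-exported:

```python
import warnings

from clients.beatsaver.models import MapDetailTagsItem

# A case-only mismatch resolves to the existing member without a warning.
assert MapDetailTagsItem("techno") is MapDetailTagsItem.TECHNO

# A tag the generated enum has never seen falls back to UNKNOWN and warns
# instead of raising ValueError.
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    tag = MapDetailTagsItem("speedcore")

assert tag is MapDetailTagsItem.UNKNOWN
assert any("Unknown tag value" in str(w.message) for w in caught)
```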


@ -6,7 +6,9 @@ from typing import Optional, Dict, Any
from clients.beatleader import client as beatleader_client
from clients.beatleader.api.player_scores import player_scores_get_compact_scores
from clients.beatleader.api.leaderboard import leaderboard_get
from clients.beatleader.models.compact_score_response_response_with_metadata import CompactScoreResponseResponseWithMetadata
from clients.beatleader.models.leaderboard_response import LeaderboardResponse
from clients.beatleader.models.scores_sort_by import ScoresSortBy
from clients.beatleader.models.order import Order
@ -127,4 +129,54 @@ class BeatLeaderAPI:
logging.info(f"Cached scores for player {player_id} at {cache_file}")
return result
return result
def get_leaderboard(self, leaderboard_id: str, use_cache: bool = True) -> Dict[str, Any]:
"""
Fetches the leaderboard for a given leaderboard ID, with caching support.
:param leaderboard_id: The BeatLeader leaderboard ID.
:param use_cache: Whether to use cached data if available.
:return: A dictionary containing leaderboard data.
"""
cache_file = os.path.join(self.CACHE_DIR, f"leaderboard_{leaderboard_id}.json")
if use_cache and self._is_cache_valid(cache_file):
logging.debug(f"Using cached data for leaderboard {leaderboard_id}")
with open(cache_file, 'r') as f:
return json.load(f)
logging.debug(f"Fetching fresh data for leaderboard {leaderboard_id}")
try:
response = leaderboard_get.sync(
client=self.client,
id=leaderboard_id
)
logging.debug(f"Response type: {type(response)}")
logging.debug(f"Response content: {response}")
if isinstance(response, str):
try:
# Attempt to parse the string as JSON
result = json.loads(response)
except json.JSONDecodeError:
logging.error(f"Failed to parse response as JSON: {response}")
return {"error": "Invalid JSON response"}
elif isinstance(response, LeaderboardResponse):
result = response.to_dict()
else:
logging.error(f"Unexpected response type: {type(response)}")
return {"error": "Unexpected response type"}
with open(cache_file, 'w') as f:
json.dump(result, f, default=str)
logging.info(f"Cached leaderboard {leaderboard_id} at {cache_file}")
return result
except Exception as e:
logging.error(f"Error fetching leaderboard {leaderboard_id}: {e}")
return {"error": str(e)}

src/helpers/BeatSaverAPI.py (new file, 181 lines)

@ -0,0 +1,181 @@
import json
import os
import logging
from datetime import datetime, timedelta, timezone
from typing import Optional
from dateutil.relativedelta import relativedelta
from time import sleep
from os import environ

from clients.beatsaver.client import Client as beatsaver_client
from clients.beatsaver.api.maps import get_maps_latest
from clients.beatsaver.models import MapDetail, GetMapsLatestSort

LOG_LEVEL = environ.get("LOG_LEVEL", "INFO")
logging.basicConfig(
    format='%(asctime)s %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=LOG_LEVEL
)

class BeatSaverAPI:
    BASE_URL = "https://api.beatsaver.com"

    def __init__(self, cache_expiry_days: int = 1, cache_dir: Optional[str] = None):
        self.client = beatsaver_client(base_url=self.BASE_URL)
        self.cache_expiry_days = cache_expiry_days
        self.CACHE_DIR = cache_dir or self._determine_cache_dir()
        if not os.path.exists(self.CACHE_DIR):
            os.makedirs(self.CACHE_DIR)
            logging.info(f"Created cache directory: {self.CACHE_DIR}")

    def _determine_cache_dir(self) -> str:
        home_cache = os.path.expanduser("~/.cache")
        beatsaver_cache = os.path.join(home_cache, "beatsaver")
        if os.path.exists(home_cache):
            if not os.path.exists(beatsaver_cache):
                try:
                    os.makedirs(beatsaver_cache)
                    logging.info(f"Created cache directory: {beatsaver_cache}")
                except OSError as e:
                    logging.warning(f"Failed to create {beatsaver_cache}: {e}")
                    return os.path.join(os.getcwd(), ".cache")
            return beatsaver_cache
        else:
            logging.info("~/.cache doesn't exist, using local .cache directory")
            return os.path.join(os.getcwd(), ".cache")

    def _get_cache_filename(self, map_id: str) -> str:
        return os.path.join(self.CACHE_DIR, f"map_{map_id}.json")

    def _is_cache_valid(self, cache_file: str) -> bool:
        if not os.path.exists(cache_file):
            return False
        file_modified_time = datetime.fromtimestamp(os.path.getmtime(cache_file))
        return datetime.now() - file_modified_time < timedelta(days=self.cache_expiry_days)

    def get_maps(
        self,
        use_cache: bool = True,
        page_size: int = 20,
        sort: GetMapsLatestSort = GetMapsLatestSort.UPDATED,
        verified: bool = True,
        month: Optional[int] = None,
        year: Optional[int] = None,
        max_pages: int = 50
    ) -> list[MapDetail]:
        """
        Fetches maps from BeatSaver for a specific month, handling pagination and caching.

        :param use_cache: Whether to use cached data if available.
        :param page_size: Number of maps per page.
        :param sort: Sorting criteria.
        :param verified: Whether to include maps made by verified mappers.
        :param month: The month to fetch maps for (1-12). Defaults to last month if None.
        :param year: The year to fetch maps for. Defaults to current year if None.
        :param max_pages: Maximum number of pages to fetch.
        :return: A list of MapDetail objects.
        """
        now = datetime.now(timezone.utc)
        if month is None or year is None:
            target_date = now - relativedelta(months=1)
            year = target_date.year
            month = target_date.month
        start_of_month = datetime(year, month, 1, tzinfo=timezone.utc)
        end_of_month = start_of_month + relativedelta(months=1)

        cache_file = os.path.join(self.CACHE_DIR, f"maps_{year}_{month:02d}.json")
        cached_maps = []
        if use_cache and self._is_cache_valid(cache_file):
            logging.debug(f"Using cached data for maps (for {year}-{month:02d})")
            with open(cache_file, 'r') as f:
                cached_maps = [MapDetail.from_dict(map_data) for map_data in json.load(f)]
            if cached_maps:
                return cached_maps

        all_maps = cached_maps.copy()
        fetched_pages = 0
        last_map_date = end_of_month
        while fetched_pages < max_pages:
            maps = get_maps_latest.sync(
                client=self.client,
                page_size=page_size,
                sort=sort,
                verified=verified,
                automapper=False,
                before=last_map_date,
            )
            """Sample result:
>>> maps
SearchResponse(docs=[MapDetail(automapper=False, bl_qualified=False, bl_ranked=False, bookmarked=False, collaborators=[], created_at=datetime.datetime(2024, 10, 9, 21, 30, 16, 714066, tzinfo=tzutc()), curated_at=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, curator=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, declared_ai=<MapDetailDeclaredAi.NONE: 'None'>, deleted_at=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, description='', id='40c79', last_published_at=datetime.datetime(2024, 10, 9, 21, 30, 42, 709374, tzinfo=tzutc()), metadata=MapDetailMetadata(bpm=105.0, duration=153, level_author_name='PsychItsMike', song_author_name='TWRP', song_name='Content 4 U', song_sub_name='', additional_properties={}), name='TWRP - Content 4 U', qualified=False, ranked=False, stats=MapStats(downloads=0, downvotes=0, plays=0, reviews=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, score=0.5, score_one_dp=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, sentiment=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, upvotes=0, additional_properties={}), tags=[], updated_at=datetime.datetime(2024, 10, 9, 21, 30, 42, 709374, tzinfo=tzutc()), uploaded=datetime.datetime(2024, 10, 9, 21, 30, 42, 709373, tzinfo=tzutc()), uploader=UserDetail(admin=False, avatar='https://www.gravatar.com/avatar/5cff0b7698cc5a672c8544f0?d=retro', curator=False, curator_tab=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, description=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, email=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, follow_data=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, hash_='5cff0b7698cc5a672c8544f0', id=42848, name='psychitsmike', patreon=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, playlist_url='https://api.beatsaver.com/users/id/42848/playlist', senior_curator=False, stats=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, suspended_at=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, testplay=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, type=<UserDetailType.SIMPLE: 'SIMPLE'>, unique_set=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, upload_limit=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, verified_mapper=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, additional_properties={}), versions=[MapVersion(cover_url='https://na.cdn.beatsaver.com/c70447aa7c24e526bd2ef5ba9c647b833c5431d5.jpg', created_at=datetime.datetime(2024, 10, 9, 21, 30, 16, 714066, tzinfo=tzutc()), diffs=[MapDifficulty(bl_stars=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, bombs=5, characteristic=<MapDifficultyCharacteristic.STANDARD: 'Standard'>, chroma=False, cinema=False, difficulty=<MapDifficultyDifficulty.EXPERT: 'Expert'>, events=2382, label=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, length=255.0, max_score=507035, me=False, ne=False, njs=14.0, notes=559, nps=3.836, obstacles=10, offset=0.0, parity_summary=MapParitySummary(errors=0, resets=0, warns=0, additional_properties={}), seconds=145.714, stars=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, additional_properties={})], download_url='https://r2cdn.beatsaver.com/c70447aa7c24e526bd2ef5ba9c647b833c5431d5.zip', feedback=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, hash_='c70447aa7c24e526bd2ef5ba9c647b833c5431d5', key=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, preview_url='https://na.cdn.beatsaver.com/c70447aa7c24e526bd2ef5ba9c647b833c5431d5.mp3', 
sage_score=2, scheduled_at=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, state=<MapVersionState.PUBLISHED: 'Published'>, testplay_at=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, testplays=[], additional_properties={})], additional_properties={})], redirect=<clients.beatsaver.types.Unset object at 0x7fe4d3e8b810>, additional_properties={})
"""
if not maps.docs:
logging.warn("No more maps found.")
break
for map in maps.docs:
logging.debug(
f"Fetched map: '{map.name}' by {map.metadata.song_author_name} [{map.uploader.name}, "
# f"created at {map.created_at.strftime('%Y-%m-%dT%H:%M:%S')}, "
# f"last published at {map.last_published_at.strftime('%Y-%m-%dT%H:%M:%S')}, "
f"updated at {map.updated_at.strftime('%m-%d %H:%M')}, "
# f"uploaded on {map.uploaded.strftime('%Y-%m-%dT%H:%M:%S')}"
)
new_maps = [map for map in maps.docs if map not in all_maps]
logging.info(f"Fetched {len(new_maps)} new maps.")
all_maps.extend(new_maps)
if new_maps:
last_map_date = new_maps[-1].updated_at.replace(tzinfo=timezone.utc)
else:
logging.info("No new maps in this batch, stopping.")
break
if last_map_date <= start_of_month:
logging.info(f"Reached or passed the start of the month ({start_of_month}).")
break
fetched_pages += 1
sleep(1)
logging.debug(f"Total maps fetched: {len(all_maps)}")
# Filter maps to ensure they're within the target month
all_maps = [map for map in all_maps if start_of_month <= map.updated_at.replace(tzinfo=timezone.utc) <= end_of_month]
logging.debug(f"Total maps after filtering: {len(all_maps)}")
# Cache the results
with open(cache_file, 'w') as f:
json.dump([map.to_dict() for map in all_maps], f)
logging.info(f"Cached {len(all_maps)} maps for {year}-{month:02d}")
return all_maps
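# Illustrative sketch of the cache read path: assumes the generated
# MapDetail.from_dict() round-trips the to_dict() output cached above.
# load_cached_maps is a hypothetical helper name; imports are repeated here
# only to keep the sketch self-contained.
import json
import os
from clients.beatsaver.models import MapDetail
def load_cached_maps(cache_file: str) -> list:
    """Hypothetical helper: load previously cached maps, or [] if no cache exists."""
    if not os.path.exists(cache_file):
        return []
    with open(cache_file, 'r') as f:
        return [MapDetail.from_dict(entry) for entry in json.load(f)]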
"""TESTING
from src.helpers.BeatSaverAPI import *
use_cache: bool = True
page_size: int = 20
sort: GetMapsLatestSort = GetMapsLatestSort.FIRST_PUBLISHED
verified: bool = True
month: Optional[int] = None
year: Optional[int] = None
max_pages: int = 2
import sys
# Check if we're in a REPL environment
def is_running_in_repl():
return hasattr(sys, 'ps1') or sys.flags.interactive
# Create a module-level instance only if in a REPL
if is_running_in_repl():
self = BeatSaverAPI()
print("REPL environment detected. 'self' instance of BeatSaverAPI created for convenience.")
"""

View File

@ -5,6 +5,7 @@ import os
import random
import requests
import time
from time import sleep
import logging
logging.basicConfig(
@ -82,7 +83,7 @@ class SimpleBeatLeaderAPI:
break
page += 1
time.sleep(1) # Add a small delay to avoid rate limiting
sleep(1)
result = {
'metadata': {
@ -130,4 +131,48 @@ class SimpleBeatLeaderAPI:
return player_data
except requests.exceptions.RequestException as e:
logging.error(f"Error fetching player info for ID {player_id}: {e}")
return None
def get_leaderboard(self, hash, diff="ExpertPlus", mode="Standard", use_cache=True, page=1, count=10) -> list[dict]:
"""
Retrieve leaderboard for a specific map, with caching.
:param hash: Hash of the map
:param diff: Difficulty of the map (one of 'Easy', 'Normal', 'Hard', 'Expert', or 'ExpertPlus')
:param mode: Mode of the map (one of 'Standard', 'NoArrows', 'OneSaber', '90Degree', 'Lawless')
:param use_cache: Whether to use cached data if available (default: True)
:param page: Page number (default: 1)
:param count: Number of scores per page (default: 10)
:return: List of score entries for the requested page, or None if the request fails
"""
cache_file = os.path.join(self.CACHE_DIR, f"leaderboard_{hash}_{diff}_{mode}.json")
if use_cache and self._is_cache_valid(cache_file):
logging.debug(f"Using cached data for leaderboard (hash: {hash}, diff: {diff}, mode: {mode})")
with open(cache_file, 'r') as f:
cached_data = json.load(f)
return cached_data.get('data', [])
logging.debug(f"Fetching fresh data for leaderboard (hash: {hash}, diff: {diff}, mode: {mode})")
url = f"{self.BASE_URL}/v5/scores/{hash}/{diff}/{mode}"
params = {
"page": page,
"count": count
}
try:
response = self.session.get(url, params=params)
response.raise_for_status()
leaderboard_data = response.json()
# Cache the results
with open(cache_file, 'w') as f:
json.dump(leaderboard_data, f)
sleep(1)
logging.debug(f"Cached leaderboard data for hash: {hash}, diff: {diff}, mode: {mode}")
return leaderboard_data.get('data', [])
except requests.exceptions.RequestException as e:
logging.error(f"Error fetching leaderboard for hash {hash}, diff {diff}, mode {mode}: {e}")
return None
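# Illustrative usage sketch for get_leaderboard: fetches one page of scores and
# reports the average accuracy. The map hash is the example hash from the
# SearchResponse dump above, and 'accuracy' is assumed to be a 0-1 fraction as
# it is treated elsewhere in this commit.
if __name__ == "__main__":
    api = SimpleBeatLeaderAPI()
    scores = api.get_leaderboard("c70447aa7c24e526bd2ef5ba9c647b833c5431d5", diff="Expert")
    if scores:
        accuracies = [s.get('accuracy', 0) for s in scores if 'accuracy' in s]
        if accuracies:
            avg = sum(accuracies) / len(accuracies)
            print(f"Average accuracy over {len(accuracies)} scores: {avg * 100:.2f}%")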

View File

@ -1,11 +1,13 @@
from datetime import datetime, timedelta, timezone
from helpers.BeatSaverAPI import BeatSaverAPI
from helpers.SimpleBeatLeaderAPI import SimpleBeatLeaderAPI
from statistics import mean
from typing import Dict, Any, List
import argparse
import json
import logging
import os
import sys
import logging
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from typing import Dict, Any, List
from dotenv import load_dotenv
load_dotenv()
@ -96,6 +98,18 @@ def normalize_difficulty_name(difficulty_name):
# Return the mapped value or the original name if there is no mapping
return difficulty_names.get(difficulty_name, difficulty_name)
"""deprecated in favor of using undocumented api call
def infer_beatleader_leaderboard_id(song_id: str, difficulty: str) -> str:
difficulty_map = {
'Easy': 1,
'Normal': 3,
'Hard': 5,
'Expert': 7,
'ExpertPlus': 9,
}
return f"{song_id}{difficulty_map[difficulty]}1"
"""
def playlist_strategy_scoresaber_oldscores(
api: ScoreSaberAPI,
song_count: int = 20 # Total number of songs to select
@ -284,6 +298,102 @@ def playlist_strategy_beatleader_oldscores(
return playlist_data
def map_leaders_by_month(month: int = 9, year: int = 2024) -> List[Dict]:
"""
Gathers a month's worth of maps from the BeatSaver latest-maps endpoint and
calculates the average BeatLeader leaderboard accuracy for each Standard-characteristic
map+difficulty, so downstream strategies can prioritize difficulties where players
have already set good scores.
Args:
month: Calendar month to gather maps for (default: 9).
year: Calendar year to gather maps for (default: 2024).
Returns:
A list of dictionaries, each containing:
- map_name: Name of the map
- difficulty: Difficulty level
- average_accuracy: Average accuracy of the leaderboard
"""
beatleader_api = SimpleBeatLeaderAPI()
beatsaver_api = BeatSaverAPI()
map_data = beatsaver_api.get_maps(year=year, month=month)
collected_data = []
for map_entry in map_data:
# Ensure there are versions available
if not map_entry.versions:
logging.warning(f"No versions found for map: {map_entry.name}")
continue
latest_version = max(map_entry.versions, key=lambda version: version.created_at)
# latest_version_hash = latest_version.hash_
for diff in latest_version.diffs:
if diff.characteristic != 'Standard':
continue
leaderboard_data = beatleader_api.get_leaderboard(latest_version.hash_, diff.difficulty)
if not leaderboard_data:
logging.warning(f"No leaderboard data for {map_entry.name} [{diff.difficulty}]")
continue
# Calculate average accuracy
accuracies = [entry.get('accuracy', 0) for entry in leaderboard_data if 'accuracy' in entry]
if not accuracies:
logging.warning(f"No accuracy data for {map_entry.name} [{diff.difficulty}]")
continue
avg_accuracy = mean(accuracies)
collected_data.append({
'map_name': map_entry.name,
'difficulty': diff.difficulty,
'average_accuracy': avg_accuracy
})
logging.info(f"Collected {len(collected_data)} map+difficulty combinations by average accuracy for {month}/{year}.")
return collected_data
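# Illustrative usage sketch: _preview_map_leaders is a hypothetical helper that
# logs the best-scoring map+difficulty from the collected data, using the
# function's default month/year shown above.
def _preview_map_leaders() -> None:
    entries = map_leaders_by_month(month=9, year=2024)
    if entries:
        best = max(entries, key=lambda e: e['average_accuracy'])
        logging.info(
            f"Best average accuracy: {best['map_name']} [{best['difficulty']}] "
            f"at {best['average_accuracy'] * 100:.2f}%"
        )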
def playlist_strategy_highest_accuracy(count: int = 40) -> List[Dict]:
"""
Selects the top map+difficulty combinations with the highest average accuracy.
Args:
count: The number of map+difficulty combinations to select. Default is 40.
Returns:
A list of dictionaries containing the selected map+difficulty combinations,
each with:
- map_name: Name of the map
- difficulty: Difficulty level
- average_accuracy: Average accuracy of the leaderboard
"""
# Retrieve the collected map+difficulty data with average accuracies
map_difficulty_data = map_leaders_by_month()
if not map_difficulty_data:
logging.error("No map+difficulty data available to create a playlist.")
return []
# Sort the data by average_accuracy in descending order
sorted_data = sorted(
map_difficulty_data,
key=lambda x: x['average_accuracy'],
reverse=True
)
# Select the top 'count' entries
selected_playlist = sorted_data[:count]
# Log the selected playlist
logging.info(f"Selected top {count} map+difficulty combinations by average accuracy:")
for idx, entry in enumerate(selected_playlist, start=1):
logging.info(
f"{idx}. {entry['map_name']} [{entry['difficulty']}] - "
f"Average Accuracy: {entry['average_accuracy'] * 100:.2f}%"
)
return selected_playlist
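# Illustrative usage sketch: _dump_highest_accuracy_playlist is a hypothetical
# helper that writes the selection to JSON for manual review; the output path
# is an arbitrary example.
def _dump_highest_accuracy_playlist(path: str = "highest_accuracy_preview.json") -> None:
    selection = playlist_strategy_highest_accuracy(count=40)
    with open(path, 'w') as f:
        json.dump(selection, f, indent=2)
    logging.info(f"Wrote {len(selection)} entries to {path}")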
def saberlist() -> None:
"""
Generate a playlist of songs from a range of difficulties, all with scores previously set a long time ago.