import json
import logging
import os
import time
from datetime import datetime, timedelta

import requests

logging.basicConfig(
    format='%(asctime)s %(levelname)s: %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=logging.DEBUG
)


class BeatLeaderAPI:
    """Client for the BeatLeader API with simple on-disk caching of player scores."""

    BASE_URL = "https://api.beatleader.xyz"

    def __init__(self, cache_expiry_days=1):
        self.session = requests.Session()
        self.cache_expiry_days = cache_expiry_days
        self.CACHE_DIR = self._determine_cache_dir()
        if not os.path.exists(self.CACHE_DIR):
            os.makedirs(self.CACHE_DIR)

    def _determine_cache_dir(self):
        home_cache = os.path.expanduser("~/.cache")
        saberlist_cache = os.path.join(home_cache, "saberlist")

        if os.path.exists(home_cache):
            if not os.path.exists(saberlist_cache):
                try:
                    os.makedirs(saberlist_cache)
                    logging.info(f"Created cache directory: {saberlist_cache}")
                except OSError as e:
                    logging.warning(f"Failed to create {saberlist_cache}: {e}")
                    return os.path.join(os.getcwd(), ".cache")
            return saberlist_cache
        else:
            logging.info("~/.cache doesn't exist, using local .cache directory")
            return os.path.join(os.getcwd(), ".cache")

    def _get_cache_filename(self, player_id):
        return os.path.join(self.CACHE_DIR, f"player_{player_id}_scores.json")

    def _is_cache_valid(self, cache_file):
        if not os.path.exists(cache_file):
            return False
        file_modified_time = datetime.fromtimestamp(os.path.getmtime(cache_file))
        return datetime.now() - file_modified_time < timedelta(days=self.cache_expiry_days)

    def get_player_scores(self, player_id, use_cache=True, page_size=100, max_pages=None):
        """
        Fetch all score pages for a player and cache the combined result on disk.

        :param player_id: ID of the player
        :param use_cache: Whether to return cached data if it is still fresh (default: True)
        :param page_size: Number of scores requested per page (default: 100)
        :param max_pages: Maximum number of pages to fetch (None for no limit)
        :return: Dict with 'metadata' and 'data' (the combined list of scores)
        """
        cache_file = self._get_cache_filename(player_id)

        if use_cache and self._is_cache_valid(cache_file):
            logging.debug(f"Using cached data for player {player_id}")
            with open(cache_file, 'r') as f:
                return json.load(f)

        logging.debug(f"Fetching fresh data for player {player_id}")
        url = f"{self.BASE_URL}/player/{player_id}/scores"

        all_scores = []
        page = 1
        total_items = None

        while max_pages is None or page <= max_pages:
            params = {
                "page": page,
                "count": page_size
            }
            response = self.session.get(url, params=params)
            response.raise_for_status()
            data = response.json()

            all_scores.extend(data['data'])

            if total_items is None:
                total_items = data['metadata']['total']

            if len(all_scores) >= total_items:
                break

            page += 1
            time.sleep(1)  # Add a small delay to avoid rate limiting

        result = {
            'metadata': {
                'total': total_items,
                'itemsPerPage': page_size,
                'page': page
            },
            'data': all_scores
        }

        with open(cache_file, 'w') as f:
            json.dump(result, f)

        return result

    def get_player_songs(self, player_id, page=1, count=100, use_cache=True):
        # get_player_scores paginates through all pages itself, so only the page size
        # and cache flag are forwarded (by keyword); the 'page' argument is not used here.
        scores = self.get_player_scores(player_id, use_cache=use_cache, page_size=count)
        songs = []
        song_ids = set()

        for score in scores['data']:
            song = score['leaderboard']['song']
            if song['id'] not in song_ids:
                songs.append(song)
                song_ids.add(song['id'])

        return songs

    def get_player_songs_minimal(self, player_id, page=1, count=100, use_cache=True):
        full_songs = self.get_player_songs(player_id, page, count, use_cache)
        return [self._get_minimal_song_data(song) for song in full_songs]

    def _get_minimal_song_data(self, song):
        return {
            'id': song['id'],
            'name': song['name'],
            'author': song['author'],
            'mapper': song['mapper'],
            'hash': song['hash'],
            'bpm': song['bpm'],
            'duration': song['duration']
        }

    def clear_cache(self, player_id=None):
        if player_id:
            cache_file = self._get_cache_filename(player_id)
            if os.path.exists(cache_file):
                os.remove(cache_file)
            logging.debug(f"Cleared cache for player {player_id}")
        else:
            for file in os.listdir(self.CACHE_DIR):
                os.remove(os.path.join(self.CACHE_DIR, file))
            logging.debug("Cleared all cache")

    def get_cache_dir(self):
        return self.CACHE_DIR

    def create_bplist(self, scores, playlist_title="playlist", playlist_author="SaberList Tool", song_limit=0):
        """
        Create a bplist (JSON) file in the current directory from the given scores data.

        :param scores: List of score data from get_player_scores
        :param playlist_title: Title of the playlist (default: "playlist")
        :param playlist_author: Author of the playlist (default: "SaberList Tool")
        :param song_limit: Maximum number of songs to include (0 for no limit)
        :return: Path to the created bplist file
        """
        playlist = {
            "playlistTitle": playlist_title,
            "playlistAuthor": playlist_author,
            "songs": []
        }

        # Determine the number of songs to include
        num_songs = len(scores) if song_limit == 0 else min(song_limit, len(scores))

        for score in scores[:num_songs]:
            song = score['leaderboard']['song']
            difficulty = score['leaderboard']['difficulty']

            song_entry = {
                "hash": song['hash'],
                "songName": song['name'],
                "difficulties": [
                    {
                        "name": difficulty['difficultyName'].lower(),
                        "characteristic": difficulty['modeName'].lower()
                    }
                ],
                "levelAuthorName": song['mapper']
            }
            playlist["songs"].append(song_entry)

        # Generate a unique filename
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"{playlist_title}_{timestamp}.bplist"

        # Write the playlist to a file
        with open(filename, 'w') as file:
            json.dump(playlist, file, indent=4)

        logging.info(f"Playlist written to {filename} with {num_songs} songs")

        return os.path.abspath(filename)
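
    # For reference, a playlist written by create_bplist has the following shape.
    # The values shown are illustrative placeholders, not real BeatLeader data:
    #
    # {
    #     "playlistTitle": "playlist",
    #     "playlistAuthor": "SaberList Tool",
    #     "songs": [
    #         {
    #             "hash": "abc123...",
    #             "songName": "Example Song",
    #             "difficulties": [{"name": "expert", "characteristic": "standard"}],
    #             "levelAuthorName": "ExampleMapper"
    #         }
    #     ]
    # }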

    def create_player_playlist(self, player_id, playlist_title="playlist", playlist_author="SaberList Tool", song_limit=0, use_cache=True):
        """
        Create a bplist (JSON) file for a player's scores.

        :param player_id: ID of the player
        :param playlist_title: Title of the playlist (default: "playlist")
        :param playlist_author: Author of the playlist (default: "SaberList Tool")
        :param song_limit: Maximum number of songs to include (0 for no limit)
        :param use_cache: Whether to use cached scores data (default: True)
        :return: Path to the created bplist file
        """
        scores_data = self.get_player_scores(player_id, use_cache=use_cache)
        return self.create_bplist(scores_data['data'], playlist_title, playlist_author, song_limit)
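

# Minimal usage sketch (not part of the library): the player ID below is a
# hypothetical placeholder; the songs that end up in the playlist depend on
# whatever the BeatLeader API returns for that player.
if __name__ == "__main__":
    api = BeatLeaderAPI(cache_expiry_days=1)
    example_player_id = "76561198000000000"  # hypothetical player ID
    playlist_path = api.create_player_playlist(
        example_player_id,
        playlist_title="Recent Scores",
        song_limit=20,
    )
    logging.info(f"Wrote playlist to {playlist_path}")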