"""Download public Snapchat stories and curated highlights for a list of usernames.

Scrapes each profile's web page, reads the embedded ``__NEXT_DATA__`` JSON
payload, and saves every story/highlight snap via ``funcs.download_file``.
"""

import json
import os
from urllib.parse import urlsplit

import requests
from bs4 import BeautifulSoup

from funcs import download_file

# Desktop browser UA so Snapchat serves the full web profile page.
HEADERS = {
    "user-agent": (
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
        "(KHTML, like Gecko) Chrome/129.0.0.0 Safari/537.36"
    )
}


def get_data(username):
    """Fetch a profile page and return its embedded __NEXT_DATA__ JSON as a dict.

    Raises:
        requests.HTTPError: if the profile page request fails.
        ValueError: if the page contains no __NEXT_DATA__ script tag
            (e.g. the username does not exist or the page layout changed).
    """
    url = f"https://www.snapchat.com/add/{username}"
    response = requests.get(url, headers=HEADERS, timeout=30)
    response.raise_for_status()
    soup = BeautifulSoup(response.text, "html.parser")
    script_tag = soup.find("script", id="__NEXT_DATA__")
    if script_tag is None or script_tag.string is None:
        raise ValueError(f"No __NEXT_DATA__ payload found for {username!r}")
    return json.loads(script_tag.string)


def parse_stories(stories):
    """Normalize raw snap entries into dicts with media_id, url and timestamp."""
    return [
        {
            "media_id": story["snapId"]["value"],
            "url": story["snapUrls"]["mediaUrl"],
            "timestamp": story["timestampInSec"]["value"],
        }
        for story in stories
    ]


def get_stories(data):
    """Return the profile's current (non-highlight) stories, parsed."""
    return parse_stories(data["props"]["pageProps"]["story"]["snapList"])


def get_highlights(data):
    """Return the raw curated-highlights list (empty if the profile has none)."""
    return data["props"]["pageProps"].get("curatedHighlights", [])


def get_highlight_stories(data):
    """Return every snap from every curated highlight, parsed."""
    stories = []
    for highlight in get_highlights(data):
        stories.extend(parse_stories(highlight["snapList"]))
    return stories


def _extension_from_url(url):
    """Best-effort file extension from a media URL path; defaults to '.jpg'.

    Story media may be images or videos, so the extension must come from the
    URL rather than being hard-coded. Query strings are stripped via urlsplit
    before looking at the path suffix.
    """
    ext = os.path.splitext(urlsplit(url).path)[1]
    return ext if ext else ".jpg"


def main():
    directory = "snapchat_stories"
    # Ensure the output directory exists before any download is attempted.
    os.makedirs(directory, exist_ok=True)
    usernames = [
        'little.warren1',
        'neiima22',
        'awesome.nads',
        'noordabash',
        'aleximarianna',
    ]
    for username in usernames:
        print(f"Getting stories for {username}...")
        data = get_data(username)
        print("Getting stories...")
        stories = get_stories(data)
        print("Getting highlights...")
        stories.extend(get_highlight_stories(data))
        for story in stories:
            media_id = story["media_id"]
            url = story["url"]
            timestamp = story["timestamp"]
            filename = f"{media_id}{_extension_from_url(url)}"
            filepath = os.path.join(directory, filename)
            download_file(url, filepath)
            print(f"Downloaded {filename} at {timestamp}")


if __name__ == "__main__":
    main()