|
3 | 3 | import json
|
4 | 4 | import random
|
5 | 5 | import string
|
| 6 | +import math |
6 | 7 |
|
7 | 8 | from ..eventhandlers import message_events
|
8 | 9 | from . import project
|
@@ -273,6 +274,97 @@ def projects(self, *, limit=40, offset=0):
|
273 | 274 | p["author"] = {"username":self.username}
|
274 | 275 | return commons.parse_object_list(_projects, project.Project, self._session)
|
275 | 276 |
|
| 277 | + def loves(self, *, limit=40, offset=0, get_full_project: bool=False) -> [project.Project]: |
| 278 | + """ |
| 279 | + Returns: |
| 280 | + list<projects.projects.Project>: The user's loved projects |
| 281 | + """ |
| 282 | + # We need to use beautifulsoup webscraping so we cant use the api_iterative function |
| 283 | + if offset < 0: |
| 284 | + raise exceptions.BadRequest("offset parameter must be >= 0") |
| 285 | + if limit < 0: |
| 286 | + raise exceptions.BadRequest("limit parameter must be >= 0") |
| 287 | + |
| 288 | + # There are 40 projects on display per page |
| 289 | + # So number of pages to view the limit is ceil(limit / 40) |
| 290 | + |
| 291 | + # The first page you need to view is 1 + offset // 40 |
| 292 | + # (You have to add one because the first page is idx 1 instead of 0) |
| 293 | + pages = range(1 + offset // 40, |
| 294 | + 1 + offset // 40 + math.ceil(limit / 40)) |
| 295 | + _projects = [] |
| 296 | + |
| 297 | + for page in pages: |
| 298 | + # The index of the first project on page #n is just (n-1) * 40 |
| 299 | + first_idx = (page - 1) * 40 |
| 300 | + |
| 301 | + page_content = requests.get(f"https://scratch.mit.edu/projects/all/{self.username}/loves/" |
| 302 | + f"?page={page}", headers=self._headers).content |
| 303 | + if b"Whoops! Our server is Scratch'ing its head" in page_content: |
| 304 | + # We've either tried to access a non-existent account or |
| 305 | + # the user hasn't loved enough projects (for the requested page to exist) |
| 306 | + # So we can just break out of the loop early |
| 307 | + break |
| 308 | + |
| 309 | + soup = BeautifulSoup( |
| 310 | + page_content, |
| 311 | + "html.parser" |
| 312 | + ) |
| 313 | + |
| 314 | + # Each project element is a list item with the class name 'project thumb item' so we can just use that |
| 315 | + for i, project_element in enumerate( |
| 316 | + soup.find_all("li", {"class": "project thumb item"})): |
| 317 | + # Remember we only want certain projects: |
| 318 | + # The current project idx = first_idx + i |
| 319 | + # We want to start at {offset} and end at {offset + limit} |
| 320 | + |
| 321 | + # So the offset <= current project idx <= offset + limit |
| 322 | + if offset <= first_idx + i <= offset + limit: |
| 323 | + # Each of these elements provides: |
| 324 | + # A project id |
| 325 | + # A thumbnail link (no need to webscrape this) |
| 326 | + # A title |
| 327 | + # An Author (called an owner for some reason) |
| 328 | + |
| 329 | + project_anchors = project_element.find_all("a") |
| 330 | + # Each list item has three <a> tags, the first two linking the project |
| 331 | + # 1st contains <img> tag |
| 332 | + # 2nd contains project title |
| 333 | + # 3rd links to the author & contains their username |
| 334 | + |
| 335 | + # This function is pretty handy! |
| 336 | + # I'll use it for an id from a string like: /projects/1070616180/ |
| 337 | + project_id = commons.webscrape_count(project_anchors[0].attrs["href"], |
| 338 | + "/projects/", "/") |
| 339 | + title = project_anchors[1].contents[0] |
| 340 | + author = project_anchors[2].contents[0] |
| 341 | + |
| 342 | + # Instantiating a project with the properties that we know |
| 343 | + # This may cause issues (see below) |
| 344 | + _project = project.Project(id=project_id, |
| 345 | + _session=self._session, |
| 346 | + title=title, |
| 347 | + author_name=author, |
| 348 | + url=f"https://scratch.mit.edu/projects/{project_id}/") |
| 349 | + if get_full_project: |
| 350 | + # Put this under an if statement since making api requests for every single |
| 351 | + # project will cause the function to take a lot longer |
| 352 | + _project.update() |
| 353 | + |
| 354 | + _projects.append( |
| 355 | + _project |
| 356 | + ) |
| 357 | + |
| 358 | + return _projects |
| 359 | + |
| 360 | + def loves_count(self): |
| 361 | + text = requests.get( |
| 362 | + f"https://scratch.mit.edu/projects/all/{self.username}/loves/", |
| 363 | + headers=self._headers |
| 364 | + ).text |
| 365 | + |
| 366 | + return commons.webscrape_count(text, "»\n\n (", ")") |
| 367 | + |
276 | 368 | def favorites(self, *, limit=40, offset=0):
|
277 | 369 | """
|
278 | 370 | Returns:
|
@@ -443,7 +535,7 @@ def reply_comment(self, content, *, parent_id, commentee_id=""):
|
443 | 535 |
|
444 | 536 | Warning:
|
445 | 537 | Only replies to top-level comments are shown on the Scratch website. Replies to replies are actually replies to the corresponding top-level comment in the API.
|
446 |
| - |
| 538 | +
|
447 | 539 | Therefore, parent_id should be the comment id of a top level comment.
|
448 | 540 |
|
449 | 541 | Args:
|
@@ -607,7 +699,7 @@ def comment_by_id(self, comment_id):
|
607 | 699 |
|
608 | 700 | Warning:
|
609 | 701 | For comments very far down on the user's profile, this method will take a while to find the comment. Very old comments are deleted from Scratch's database and may not appear.
|
610 |
| - |
| 702 | +
|
611 | 703 | Returns:
|
612 | 704 | scratchattach.comments.Comment: The request comment.
|
613 | 705 | """
|
@@ -675,11 +767,11 @@ def ocular_status(self):
|
675 | 767 | dict
|
676 | 768 | """
|
677 | 769 | return requests.get(f"https://my-ocular.jeffalo.net/api/user/{self.username}").json()
|
678 |
| - |
| 770 | + |
679 | 771 | def verify_identity(self, *, verification_project_id=395330233):
|
680 | 772 | """
|
681 | 773 | Can be used in applications to verify a user's identity.
|
682 |
| - |
| 774 | +
|
683 | 775 | This function returns a Verifactor object. Attributes of this object:
|
684 | 776 | :.projecturl: The link to the project where the user has to go to verify
|
685 | 777 | :.project: The project where the user has to go to verify as scratchattach.Project object
|
|
0 commit comments