Feature search_rect via API #151

Merged
74 changes: 63 additions & 11 deletions pycaching/cache.py
@@ -153,6 +153,43 @@ def _from_print_page(cls, geocaching, guid, soup):
cache_info["log_counts"] = Cache._get_log_counts_from_print_page(soup)
return Cache(geocaching, **cache_info)

@classmethod
def _from_api_record(cls, geocaching, record):
"""Create a cache instance from a JSON record returned by API."""
cache = Cache(
geocaching,
wp=record['code'],
name=record['name'],
type=Type.from_number(record['geocacheType']),
state=Status(record['cacheStatus']) == Status.enabled,
found=record['userFound'],
size=Size.from_number(record['containerType']),
difficulty=record['difficulty'],
terrain=record['terrain'],
author=record['owner']['username'],
hidden=record['placedDate'].split('T')[0],
favorites=record['favoritePoints'],
pm_only=record['premiumOnly'],

# Not consumed attributes:
# detailsUrl
# hasGeotour
# hasLogDraft
# id
# lastFoundDate
# owner.code
# userDidNotFind
)

# NOTE: Basic Members have no access to postedCoordinates of Premium-only caches
if 'postedCoordinates' in record:
cache.location = Point(
record['postedCoordinates']['latitude'],
record['postedCoordinates']['longitude']
)

return cache

def __init__(self, geocaching, wp, **kwargs):
"""Create a cache instance.

@@ -827,7 +864,7 @@ def load_by_guid(self):
type_img = os.path.basename(content.find("img").get("src"))
self.type = Type.from_filename(os.path.splitext(type_img)[0])

size_img = content.find("img", src=re.compile("\/icons\/container\/"))
size_img = content.find("img", src=re.compile(r"\/icons\/container\/"))
self.size = Size.from_string(size_img.get("alt").split(": ")[1])

D_and_T_img = content.find("p", "Meta DiffTerr").find_all("img")
@@ -843,7 +880,7 @@ def load_by_guid(self):
hidden_p = content.find("p", text=re.compile("Placed Date:"))
self.hidden = hidden_p.text.replace("Placed Date:", "").strip()

attr_img = content.find_all("img", src=re.compile("\/attributes\/"))
attr_img = content.find_all("img", src=re.compile(r"\/attributes\/"))
attributes_raw = [
os.path.basename(_.get("src")).rsplit("-", 1) for _ in attr_img
]
@@ -1004,12 +1041,12 @@ def load_logbook(self, limit=float("inf")):
img_filename = log_data["LogTypeImage"].rsplit(".", 1)[0] # filename w/o extension

# create and fill log object
l = Log()
l.type = LogType.from_filename(img_filename)
l.text = log_data["LogText"]
l.visited = log_data["Visited"]
l.author = log_data["UserName"]
yield l
log = Log()
log.type = LogType.from_filename(img_filename)
log.text = log_data["LogText"]
log.visited = log_data["Visited"]
log.author = log_data["UserName"]
yield log

# TODO: trackable list can have multiple pages - handle it in similar way as _logbook_get_page
# for example see: http://www.geocaching.com/geocache/GC26737_geocaching-jinak-tb-gc-hrbitov
@@ -1034,7 +1071,7 @@ def load_trackables(self, limit=float("inf")):
# filter out all urls for trackables
urls = [link.get("href") for link in links if "track" in link.get("href")]
# find the names matching the trackable urls
names = [re.split("[\<\>]", str(link))[2] for link in links if "track" in link.get("href")]
names = [re.split(r"[\<\>]", str(link))[2] for link in links if "track" in link.get("href")]

for name, url in zip(names, urls):

@@ -1280,6 +1317,10 @@ def from_string(cls, name):
except KeyError as e:
raise errors.ValueError("Unknown cache type '{}'.".format(name)) from e

@classmethod
def from_number(cls, number: int):
return Type(str(number))


class Size(enum.Enum):
"""Enum of possible cache sizes.
@@ -1322,14 +1363,25 @@ def from_number(cls, number):
number = int(number)

number_mapping = {
1: cls.not_chosen,
2: cls.micro,
8: cls.small,
3: cls.regular,
4: cls.large,
6: cls.other
5: cls.virtual,
6: cls.other,
8: cls.small,
}

try:
return number_mapping[number]
except KeyError as e:
raise errors.ValueError("Unknown cache size numeric id '{}'.".format(number)) from e


class Status(enum.IntEnum):
"""Enum of possible cache statuses."""
# NOTE: extracted from https://www.geocaching.com/play/map/public/main.2b28b0dc1c9c10aaba66.js
enabled = 0
disabled = 1
archived = 2
unpublished = 3
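For illustration, here is a minimal sketch of how _from_api_record (normally called internally, see search_rect in geocaching.py below) maps an API payload onto a Cache object. Only the field names match the ones consumed above; the values and the bare Geocaching() construction are hypothetical:

from pycaching import Geocaching
from pycaching.cache import Cache

gc = Geocaching()  # hypothetical; building a Cache does not require being logged in
record = {
    "code": "GC12345",                  # becomes cache.wp
    "name": "Example Cache",
    "geocacheType": 2,                  # numeric type id, resolved via Type.from_number
    "cacheStatus": 0,                   # Status.enabled, so cache.state is True
    "userFound": False,
    "containerType": 8,                 # resolved via Size.from_number
    "difficulty": 1.5,
    "terrain": 2.0,
    "owner": {"username": "someone"},
    "placedDate": "2021-01-31T00:00:00",
    "favoritePoints": 10,
    "premiumOnly": False,
    "postedCoordinates": {"latitude": 50.08, "longitude": 14.42},
}
cache = Cache._from_api_record(gc, record)
print(cache.wp, cache.difficulty, cache.location)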
19 changes: 19 additions & 0 deletions pycaching/errors.py
@@ -55,3 +55,22 @@ class ValueError(Error, ValueError):
Can be raised in various situations, but most commonly when an unexpected property value is set.
"""
pass


class TooManyRequestsError(Error):
"""Geocaching API rate limit has been reached."""

def __init__(self, url: str, rate_limit_reset: int = 0):
"""
Initialize TooManyRequestsError.

:param url: Requested URL.
:param rate_limit_reset: Number of seconds to wait before rate limit reset.
"""
self.url = url
self.rate_limit_reset = rate_limit_reset

def wait_for(self):
"""Wait enough time to release Rate Limits."""
import time
time.sleep(self.rate_limit_reset + 5)
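A rough sketch of handling this error around the low-level request helper; gc is assumed to be a logged-in pycaching.Geocaching instance and the query parameters below are only illustrative:

from pycaching.errors import TooManyRequestsError

params = {
    "box": "50.10,14.35,50.05,14.45",  # illustrative: lat,lon of the NW and SE corners
    "take": 20,
    "skip": 0,
    "sort": "datelastvisited",
    "asc": "true",
}
try:
    resp = gc._request("api/proxy/web/search", params=params, expect="json")
except TooManyRequestsError as e:
    e.wait_for()  # sleeps rate_limit_reset + 5 seconds
    resp = gc._request("api/proxy/web/search", params=params, expect="json")  # retry once

The search_rect generator added in geocaching.py below already wraps this pattern; with wait_sleep=False it yields None instead of sleeping, leaving the back-off strategy to the caller.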
4 changes: 4 additions & 0 deletions pycaching/geo.py
@@ -230,6 +230,10 @@ def __init__(self, point_a, point_b):
:param .Point point_a: Top left corner.
:param .Point point_b: Bottom right corner.
"""
if point_a.latitude < point_b.latitude:
point_a.latitude, point_b.latitude = point_b.latitude, point_a.latitude
if point_a.longitude > point_b.longitude:
point_a.longitude, point_b.longitude = point_b.longitude, point_a.longitude

assert point_a != point_b, "Corner points cannot be the same"
self.corners = [point_a, point_b]
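A small sketch of what this normalization buys; the coordinates are arbitrary and the corners are deliberately passed in the "wrong" order:

from pycaching.geo import Point, Rectangle

# Lower-left corner passed first, upper-right second.
rect = Rectangle(Point(49.73, 13.38), Point(49.74, 13.40))
# After the swap above, corners[0] is the top-left point (max latitude, min longitude)
# and corners[1] the bottom-right one, which is the order the search box parameter expects.
print(rect.corners[0].latitude, rect.corners[0].longitude)  # 49.74 13.38
print(rect.corners[1].latitude, rect.corners[1].longitude)  # 49.73 13.4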
90 changes: 86 additions & 4 deletions pycaching/geocaching.py
@@ -7,13 +7,30 @@
import json
import subprocess
import warnings
import enum
from typing import Optional, Union
from urllib.parse import parse_qs, urljoin, urlparse
from os import path
from pycaching.cache import Cache, Size
from pycaching.log import Log, Type as LogType
from pycaching.geo import Point
from pycaching.geo import Point, Rectangle
from pycaching.trackable import Trackable
from pycaching.errors import Error, NotLoggedInException, LoginFailedException, PMOnlyException
from pycaching.errors import Error, NotLoggedInException, LoginFailedException, PMOnlyException, TooManyRequestsError


class SortOrder(enum.Enum):
"""Enum of possible cache sort orderings returned in Groundspeak API."""
# NOTE: extracted from https://www.geocaching.com/play/map/public/main.2b28b0dc1c9c10aaba66.js
container_size = "containersize"
date_last_visited = "datelastvisited"
difficulty = "difficulty"
distance = "distance"
favorite_point = "favoritepoint"
found_date = "founddate"
found_date_of_found_by_user = "founddateoffoundbyuser"
geocache_name = "geocachename"
place_date = "placedate"
terrain = "terrain"


class Geocaching(object):
@@ -29,6 +46,7 @@ class Geocaching(object):
"search": "play/search",
"search_more": "play/search/more-results",
'my_logs': 'my/logs.aspx',
'api_search': 'api/proxy/web/search'
}
_credentials_file = ".gc_credentials"

@@ -67,6 +85,12 @@ def _request(self, url, *, expect="soup", method="GET", login_check=True, **kwar
return res

except requests.exceptions.RequestException as e:
if e.response.status_code == 429: # Handle rate limiting errors
raise TooManyRequestsError(
url,
rate_limit_reset=int(e.response.headers.get('x-rate-limit-reset', '0'))
) from e

raise Error("Cannot load page: {}".format(url)) from e

def login(self, username=None, password=None):
@@ -356,6 +380,64 @@ def search_quick(self, area, *, strict=False, zoom=None):

# add some shortcuts ------------------------------------------------------

def search_rect(
self,
rect: Rectangle,
*,
per_query: int = 200,
sort_by: Union[str, SortOrder] = SortOrder.date_last_visited,
origin: Optional[Point] = None,
wait_sleep: bool = True
):
"""
Return a generator of caches in the given Rectangle area.

:param rect: Search area.
:param int per_query: Number of caches requested in a single query.
:param sort_by: Order caches by the given criterion.
:param origin: Origin point for the search by distance.
:param wait_sleep: If rate limits are exceeded, wait the appropriate time when set to True,
otherwise just yield None.
"""
if not isinstance(sort_by, SortOrder):
sort_by = SortOrder(sort_by)

params = {
"box": "{},{},{},{}".format(
rect.corners[0].latitude,
rect.corners[0].longitude,
rect.corners[1].latitude,
rect.corners[1].longitude,
),
"take": per_query,
"asc": "true",
"skip": 0,
"sort": sort_by.value,
}

if sort_by is SortOrder.distance:
assert isinstance(origin, Point)
params["origin"] = "{},{}".format(origin.latitude, origin.longitude)

total, offset = None, 0
while (total is None) or (offset < total):
params["skip"] = offset

try:
resp = self._request(self._urls["api_search"], params=params, expect="json")
except TooManyRequestsError as e:
if wait_sleep:
e.wait_for()
else:
yield None
continue

for record in resp["results"]:
yield Cache._from_api_record(self, record)

total = resp["total"]
offset += per_query
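For example, a hedged usage sketch; the credentials and coordinates are placeholders, and the string "distance" is simply the value of SortOrder.distance:

import itertools
import pycaching
from pycaching.geo import Point, Rectangle

geocaching = pycaching.login("user", "password")  # placeholder credentials
rect = Rectangle(Point(50.10, 14.35), Point(50.05, 14.45))
origin = Point(50.08, 14.40)

# Take only the first 20 caches, ordered by distance from the origin point.
nearest = itertools.islice(
    geocaching.search_rect(rect, sort_by="distance", origin=origin), 20)
for cache in nearest:
    print(cache.wp, cache.name)

Because results are fetched lazily in pages of per_query caches, slicing the generator this way keeps the number of API calls down.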

def geocode(self, location):
"""Return a :class:`.Point` object from geocoded location.

@@ -396,8 +478,8 @@ def post_log(self, wp, text, type=LogType.found_it, date=None):
"""
if not date:
date = datetime.date.today()
l = Log(type=type, text=text, visited=date)
self.get_cache(wp).post_log(l)
log = Log(type=type, text=text, visited=date)
self.get_cache(wp).post_log(log)

def _cache_from_guid(self, guid):
logging.info('Loading cache with GUID {!r}'.format(guid))
2 changes: 1 addition & 1 deletion pycaching/util.py
@@ -77,7 +77,7 @@ def format_date(date, user_date_format):
"""Format a date according to user_date_format."""
# parse user format
date_format = user_date_format.lower()
date_format = re.split("(\W+)", date_format)
date_format = re.split(r"(\W+)", date_format)
# non-zero-padded numbers use different characters depending on different platforms
# see https://strftime.org/ for example
eat_zero_prefix = "#" if platform.system() == "Windows" else "-"