Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions python/looker_sdk/error.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,15 +103,15 @@ class ErrorDocHelper:
)
pattern = re.compile(RE_PATTERN, flags=re.IGNORECASE)

def get_index(self, url: str = ERROR_CODES_URL) -> None:
r = requests.get(f"{url}index.json")
def get_index(self, url: str = ERROR_CODES_URL, timeout: int = 10) -> None:
    """Fetch the error-code index and cache it on ``self.lookup_dict``.

    Args:
        url: Base URL of the error-code documentation site; ``index.json``
            is appended to it.
        timeout: Per-request timeout in seconds passed to ``requests.get``.

    Raises:
        requests.exceptions.RequestException: on connection failure, timeout,
            or (via ``raise_for_status``) a non-2xx HTTP response. Callers
            such as ``parse_and_lookup`` catch this base class, so failures
            degrade gracefully instead of surfacing a ``json.JSONDecodeError``
            from parsing an HTML error page.
    """
    r = requests.get(f"{url}index.json", timeout=timeout)
    # Fail loudly on 4xx/5xx so the body is never mistaken for JSON.
    r.raise_for_status()
    # r.json() honors the response encoding and is the idiomatic way to
    # decode a JSON payload from a requests.Response.
    self.lookup_dict = r.json()
Comment on lines +107 to 108
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

The requests.get() call doesn't check for unsuccessful HTTP status codes. If the server returns an error (e.g., 404, 500), r.text might not be valid JSON, which could lead to a json.JSONDecodeError. It's a good practice to handle such responses explicitly.

I suggest using r.raise_for_status() to raise an HTTPError for bad responses. This exception is a subclass of requests.exceptions.RequestException and will be caught by the handler in parse_and_lookup, ensuring graceful failure. Using r.json() is also more idiomatic for parsing JSON responses.

Suggested change
r = requests.get(f"{url}index.json", timeout=timeout)
self.lookup_dict = json.loads(r.text)
r = requests.get(f"{url}index.json", timeout=timeout)
r.raise_for_status()
self.lookup_dict = r.json()


def lookup(
self, url: str = ERROR_CODES_URL, code: str = "", path: str = ""
self, url: str = ERROR_CODES_URL, code: str = "", path: str = "", timeout: int = 10
) -> Tuple[str, str]:
if len(self.lookup_dict) == 0:
self.get_index(url=url)
self.get_index(url=url, timeout=timeout)

error_doc_url: str = ""
error_doc: str = ""
Expand All @@ -128,13 +128,13 @@ def lookup(
error_doc = f"### No documentation found for {code}"

if error_doc_url:
r = requests.get(f"{self.ERROR_CODES_URL}{error_doc_url}")
r = requests.get(f"{self.ERROR_CODES_URL}{error_doc_url}", timeout=timeout)
error_doc = r.text
Comment on lines +131 to 132
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

medium

Like the other requests.get() call, this one doesn't handle non-200 HTTP status codes. If the request fails, r.text could contain an error payload (e.g., an HTML error page) instead of the expected markdown documentation. To make this more robust, check the response status before using the body.

Adding r.raise_for_status() will ensure the request was successful before its content is used. The resulting exception will be handled gracefully by the try...except block in parse_and_lookup.

Suggested change
r = requests.get(f"{self.ERROR_CODES_URL}{error_doc_url}", timeout=timeout)
error_doc = r.text
r = requests.get(f"{self.ERROR_CODES_URL}{error_doc_url}", timeout=timeout)
r.raise_for_status()
error_doc = r.text


return (f"{self.ERROR_CODES_URL}{error_doc_url}", error_doc)

def parse_and_lookup(
self, error_url: str, url: str = ERROR_CODES_URL
self, error_url: str, url: str = ERROR_CODES_URL, timeout: int = 10
) -> Tuple[str, str]:
m = re.search(self.RE_PATTERN, error_url)
if not m:
Expand All @@ -143,6 +143,6 @@ def parse_and_lookup(
code: str = cast(Tuple[str, str, str, str], m.groups())[2]
path: str = cast(Tuple[str, str, str, str], m.groups())[3]
try:
return self.lookup(url=url, code=code, path=path)
return self.lookup(url=url, code=code, path=path, timeout=timeout)
except requests.exceptions.RequestException:
return ("", "")
Loading