diff --git a/src/grizzly/args.py b/src/grizzly/args.py
index ca5c64a9..f6028002 100644
--- a/src/grizzly/args.py
+++ b/src/grizzly/args.py
@@ -349,11 +349,11 @@ def sanity_check(self, args: Namespace) -> None:
        for asset, path in args.asset:
            if not supported_assets or asset not in supported_assets:
                self.parser.error(
-                    f"Asset {asset!r} not supported by target {args.platform!r}"
+                    f"Asset '{asset}' not supported by target '{args.platform}'"
                )
            if not exists(path):
                self.parser.error(
-                    f"Failed to add asset {asset!r} cannot find {path!r}"
+                    f"Failed to add asset '{asset}' cannot find '{path}'"
                )

        if args.time_limit is not None and args.time_limit < 1:
diff --git a/src/grizzly/common/plugins.py b/src/grizzly/common/plugins.py
index d551d010..5f2c1e92 100644
--- a/src/grizzly/common/plugins.py
+++ b/src/grizzly/common/plugins.py
@@ -32,12 +32,12 @@ def load_plugin(name: str, group: str, base_type: type) -> Any:
    for entry in iter_entry_points(group):
        if entry.name == name:
            plugin = entry.load()
-            LOG.debug("loading %r (%s)", name, base_type.__name__)
+            LOG.debug("loading '%s' (%s)", name, base_type.__name__)
            break
    else:
-        raise PluginLoadError(f"{name!r} not found in {group!r}")
+        raise PluginLoadError(f"'{name}' not found in '{group}'")
    if not issubclass(plugin, base_type):
-        raise PluginLoadError(f"{name!r} doesn't inherit from {base_type.__name__}")
+        raise PluginLoadError(f"'{name}' doesn't inherit from {base_type.__name__}")
    return plugin


@@ -51,11 +51,11 @@ def scan_plugins(group: str) -> list[str]:
        Names of installed entry points.
    """
    found: list[str] = []
-    LOG.debug("scanning %r", group)
+    LOG.debug("scanning '%s'", group)
    for entry in iter_entry_points(group):
        if entry.name in found:
            # not sure if this can even happen
-            raise PluginLoadError(f"Duplicate entry {entry.name!r} in {group!r}")
+            raise PluginLoadError(f"Duplicate entry '{entry.name}' in '{group}'")
        found.append(entry.name)
    return found
diff --git a/src/grizzly/common/runner.py b/src/grizzly/common/runner.py
index cae8310b..8c4379e5 100644
--- a/src/grizzly/common/runner.py
+++ b/src/grizzly/common/runner.py
@@ -394,7 +394,7 @@ def run(
            else:
                # something is wrong so close the target
                # previous iteration put target in a bad state?
-                LOG.debug("entry point not served (%r)", testcase.entry_point)
+                LOG.debug("entry point not served (%s)", testcase.entry_point)
                self._target.close()
            # detect startup failures
            if self.initial:
diff --git a/src/grizzly/common/stack_hasher.py b/src/grizzly/common/stack_hasher.py
index 049a4593..63d44048 100644
--- a/src/grizzly/common/stack_hasher.py
+++ b/src/grizzly/common/stack_hasher.py
@@ -61,11 +61,11 @@ def __str__(self) -> str:
        if self.stack_line is not None:
            out.append(f"{int(self.stack_line):02d}")
        if self.function is not None:
-            out.append(f"function: {self.function!r}")
+            out.append(f"function: '{self.function}'")
        if self.location is not None:
-            out.append(f"location: {self.location!r}")
+            out.append(f"location: '{self.location}'")
        if self.offset is not None:
-            out.append(f"offset: {self.offset!r}")
+            out.append(f"offset: '{self.offset}'")
        return " - ".join(out)

    @classmethod
@@ -444,7 +444,7 @@ def from_text(cls, input_text: str, major_depth: int = MAJOR_DEPTH) -> Stack:
                else:
                    frame = parser_class.from_line(line)
            except Exception: # pragma: no cover
-                LOG.error("Error calling from_line() with: %r", line)
+                LOG.error("Error calling from_line() with: '%s'", line)
                raise
            if frame is None:
                continue
diff --git a/src/grizzly/common/storage.py b/src/grizzly/common/storage.py
index 2a479317..59f07ce7 100644
--- a/src/grizzly/common/storage.py
+++ b/src/grizzly/common/storage.py
@@ -200,7 +200,7 @@ def add_from_file(
        else:
            url_path = self.sanitize_path(file_name)
        if url_path in self:
-            raise TestFileExists(f"{url_path!r} exists in test")
+            raise TestFileExists(f"'{url_path}' exists in test")

        dst_file = self._root / url_path
        # don't move/copy data is already in place
@@ -546,12 +546,12 @@ def sanitize_path(path: str) -> str:
        """
        # check for missing filename or path containing drive letter (Windows)
        if split(path)[-1] in ("", ".", "..") or ":" in path:
-            raise ValueError(f"invalid path {path!r}")
+            raise ValueError(f"invalid path '{path}'")
        # normalize path
        path = normpath(path).replace("\\", "/")
        # check normalized path does not resolve to location outside of '.'
        if path.startswith("../"):
-            raise ValueError(f"invalid path {path!r}")
+            raise ValueError(f"invalid path '{path}'")
        return path.lstrip("/")

@@ -591,7 +591,7 @@ def load_testcases(
                test.assets.clear()
                test.assets_path = None
        LOG.debug(
-            "loaded TestCase(s): %d, assets: %r, env vars: %r",
+            "loaded TestCase(s): %d, assets: %s, env vars: %s",
            len(tests),
            asset_mgr is not None,
            env_vars is not None,
diff --git a/src/grizzly/main.py b/src/grizzly/main.py
index 7790aa56..5c7dd7c1 100644
--- a/src/grizzly/main.py
+++ b/src/grizzly/main.py
@@ -126,9 +126,9 @@ def main(args: Namespace) -> int:
        reporter.display_logs = args.smoke_test or reporter.display_logs

        if args.limit:
-            LOG.info("%r iteration(s) will be attempted", args.limit)
+            LOG.info("%d iteration(s) will be attempted", args.limit)
        if args.runtime:
-            LOG.info("Runtime is limited to %rs", args.runtime)
+            LOG.info("Runtime is limited to %ds", args.runtime)

        # set 'auto_close=1' so the client error pages (code 4XX) will
        # call 'window.close()' after a second.
diff --git a/src/grizzly/reduce/core.py b/src/grizzly/reduce/core.py
index 5434b33b..57f836f3 100644
--- a/src/grizzly/reduce/core.py
+++ b/src/grizzly/reduce/core.py
@@ -654,8 +654,8 @@ def run(
        except KeyboardInterrupt:
            if best_results:
                LOG.warning(
-                    "Ctrl+C detected, best reduction so far reported as %r",
-                    self._status.last_reports,
+                    "Ctrl+C detected, best reduction so far reported as '%s'",
+                    ", ".join(self._status.last_reports),
                )
            raise
        finally:
diff --git a/src/grizzly/reduce/strategies/lithium.py b/src/grizzly/reduce/strategies/lithium.py
index 9c1326a6..c0903ee3 100644
--- a/src/grizzly/reduce/strategies/lithium.py
+++ b/src/grizzly/reduce/strategies/lithium.py
@@ -100,7 +100,7 @@ def __iter__(self) -> Generator[list[TestCase]]:
        reduce_queue.sort() # not necessary, but helps make tests more predictable
        while reduce_queue:
            LOG.debug(
-                "Reduce queue: %r",
+                "Reduce queue: '%s'",
                ", ".join(
                    str(x.relative_to(self._testcase_root)) for x in reduce_queue
                ),
@@ -145,7 +145,7 @@ def __iter__(self) -> Generator[list[TestCase]]:
            self._tried.add(self._calculate_testcase_hash())
        else:
            LOG.debug(
-                "files being reduced before: %r",
+                "files being reduced before: '%s'",
                ", ".join(
                    str(x.relative_to(self._testcase_root))
                    for x in self._files_to_reduce
@@ -153,7 +153,7 @@ def __iter__(self) -> Generator[list[TestCase]]:
            )
            self.rescan_files_to_reduce(testcases)
            LOG.debug(
-                "files being reduced after: %r",
+                "files being reduced after: '%s'",
                ", ".join(
                    str(x.relative_to(self._testcase_root))
                    for x in self._files_to_reduce
diff --git a/src/grizzly/replay/replay.py b/src/grizzly/replay/replay.py
index f7e4e926..bf229e06 100644
--- a/src/grizzly/replay/replay.py
+++ b/src/grizzly/replay/replay.py
@@ -394,7 +394,7 @@ def harness_fn(_: str) -> bytes: # pragma: no cover
                    assert not expect_hang
                    assert self._signature is None
                    LOG.debug(
-                        "no signature given, using short sig %r",
+                        "no signature given, using short sig '%s'",
                        report.short_signature,
                    )
                if runner.startup_failure:
@@ -447,7 +447,7 @@ def harness_fn(_: str) -> bytes: # pragma: no cover
                    self.status.ignored += 1
                    if run_result.timeout:
                        LOG.info(
-                            "Result: Ignored (%d); timeout, idle: %r",
+                            "Result: Ignored (%d); timeout, idle: %s",
                            self.status.ignored,
                            run_result.idle,
                        )
@@ -511,7 +511,7 @@ def harness_fn(_: str) -> bytes: # pragma: no cover
                if not success and result.expected:
                    if not self._any_crash:
                        LOG.debug(
-                            "%r less than minimum (%d/%d)",
+                            "'%s' less than minimum (%d/%d)",
                            crash_hash,
                            result.count,
                            min_results,
diff --git a/src/grizzly/session.py b/src/grizzly/session.py
index 40ce573a..57fe0659 100644
--- a/src/grizzly/session.py
+++ b/src/grizzly/session.py
@@ -178,7 +178,7 @@ def run(
        LOG.debug("calling adapter.setup()")
        self.adapter.setup(input_path, self.iomanager.server_map)

-        LOG.debug("configuring harness (%r)", not no_harness)
+        LOG.debug("configuring harness (%s)", not no_harness)
        harness = None if no_harness else self.adapter.get_harness()
        LOG.debug("configuring redirects (w/harness: %s)", harness is not None)
        if harness is None:
@@ -298,7 +298,7 @@ def run(
                    self.status.ignored += 1
                    if result.timeout:
                        LOG.info(
-                            "Ignored - %d; timeout, idle: %r",
+                            "Ignored - %d; timeout, idle: %s",
                            self.status.ignored,
                            result.idle,
                        )
diff --git a/src/grizzly/target/assets.py b/src/grizzly/target/assets.py
index 075df62f..1a0b075b 100644
--- a/src/grizzly/target/assets.py
+++ b/src/grizzly/target/assets.py
@@ -53,7 +53,7 @@ def add(self, asset: str, path: Path, copy: bool = True) -> Path:
        dst = self.path / path.name
        # remove existing asset with the same name
        if asset in self.assets:
-            LOG.debug("asset %r exists, removing existing", asset)
+            LOG.debug("asset '%s' exists, removing existing", asset)
            self.remove(asset)
        # avoid overwriting data that is part of an existing asset
        if dst.exists():
@@ -66,7 +66,7 @@ def add(self, asset: str, path: Path, copy: bool = True) -> Path:
        else:
            move(path, self.path)
        self.assets[asset] = path.name
-        LOG.debug("%s asset %r to '%s'", "copied" if copy else "moved", asset, dst)
+        LOG.debug("%s asset '%s' to '%s'", "copied" if copy else "moved", asset, dst)
        return dst

    def add_batch(self, assets: Iterable[Sequence[str]]) -> None:
diff --git a/src/grizzly/target/firefox_target.py b/src/grizzly/target/firefox_target.py
index bd8da2ac..155c8a68 100644
--- a/src/grizzly/target/firefox_target.py
+++ b/src/grizzly/target/firefox_target.py
@@ -284,7 +284,7 @@ def handle_hang(
        elif send_abort:
            # sending SIGABRT is only supported on Linux for now
            # TODO: add/test on other OSs
-            LOG.debug("sending SIGABRT to %r (%0.1f%%)", pid, cpu)
+            LOG.debug("sending SIGABRT to %d (%0.1f%%)", pid, cpu)
            try:
                kill(pid, SIGABRT)
            except OSError:
@@ -384,7 +384,7 @@ def process_assets(self) -> None:
                path = Path(suppressions.strip("\"'"))
                if path.is_file():
                    # use environment specified suppression file
-                    LOG.debug("using %r from environment", asset)
+                    LOG.debug("using '%s' from environment", asset)
                    opts.add(
                        "suppressions",
                        f"'{self.asset_mgr.add(asset, path)}'",
@@ -393,10 +393,10 @@ def process_assets(self) -> None:
                else:
                    LOG.warning("Missing %s suppressions file '%s'", sanitizer, path)
            else:
-                LOG.debug("%r does not contain suppressions", var_name)
+                LOG.debug("'%s' does not contain suppressions", var_name)
                continue
            # update sanitized *SAN_OPTIONS
-            LOG.debug("updating suppressions in %r", var_name)
+            LOG.debug("updating suppressions in '%s'", var_name)
            self.environ[var_name] = str(opts)

    def save_logs(self, dst: Path) -> None:
diff --git a/src/sapphire/connection_manager.py b/src/sapphire/connection_manager.py
index 713bc733..53bb4475 100644
--- a/src/sapphire/connection_manager.py
+++ b/src/sapphire/connection_manager.py
@@ -143,7 +143,7 @@ def serve(
        launches = 0
        running = 0
        workers: list[Worker] = []
-        LOG.debug("accepting requests (workers: %d, timeout: %r)", self._limit, timeout)
+        LOG.debug("accepting requests (workers: %d, timeout: %d)", self._limit, timeout)
        try:
            while not self._job.is_complete() and self._can_continue(continue_cb):
                # launch workers
diff --git a/src/sapphire/core.py b/src/sapphire/core.py
index 17a5b606..81a66737 100644
--- a/src/sapphire/core.py
+++ b/src/sapphire/core.py
@@ -176,7 +176,7 @@ def clear_backlog(self, timeout: float = 10) -> bool:
                    # no remaining pending connections
                    break
                except OSError as exc:
-                    LOG.debug("Error closing socket: %r", exc)
+                    LOG.debug("Error closing socket: '%s'", exc)
                else:
                    LOG.debug("pending socket closed")
            # if this fires something is likely actively trying to connect
@@ -238,7 +238,7 @@ def serve_path(
            Served status and files served.
        """
        assert self.timeout >= 0
-        LOG.debug("serving '%s' (forever=%r, timeout=%d)", path, forever, self.timeout)
+        LOG.debug("serving '%s' (forever=%s, timeout=%d)", path, forever, self.timeout)
        job = Job(
            path,
            auto_close=self._auto_close,
@@ -248,7 +248,7 @@ def serve_path(
        )
        with ConnectionManager(job, self._socket, limit=self._max_workers) as mgr:
            timed_out = not mgr.serve(self.timeout, continue_cb=continue_cb)
-            LOG.debug("status: %s, timed out: %r", job.status.name, timed_out)
+            LOG.debug("status: %s, timed out: %s", job.status.name, timed_out)
        return ServeResult(job.served, job.status, timed_out)

    @classmethod
diff --git a/src/sapphire/job.py b/src/sapphire/job.py
index 538f4a6b..16057b60 100644
--- a/src/sapphire/job.py
+++ b/src/sapphire/job.py
@@ -119,7 +119,7 @@ def _build_pending(self, required_files: Iterable[str] | None) -> None:
                entry = self._wwwroot / required
                if entry.is_file():
                    self._pending.files.add(str(entry.resolve()))
-                    LOG.debug("required: %r", required)
+                    LOG.debug("required: '%s'", required)
        # if nothing was found check if the path exists
        if not self._pending.files and not self._wwwroot.is_dir():
            raise OSError(f"wwwroot '{self._wwwroot}' does not exist")
@@ -130,7 +130,7 @@ def _build_pending(self, required_files: Iterable[str] | None) -> None:
            if resource.required:
                self._pending.files.add(url)
                LOG.debug(
-                    "required: %r -> %r",
+                    "required: '%s' -> '%s'",
                    url,
                    (
                        ""
@@ -191,7 +191,7 @@ def lookup_resource(
                return self.server_map.dynamic[path]
        # search include paths for a match
        for inc in (x for x in self.server_map.include if path.startswith(x)):
-            LOG.debug("checking include %r", inc)
+            LOG.debug("checking include '%s'", inc)
            # strip include prefix from potential file name
            file = path[len(inc) :].lstrip("/")
            local = self.server_map.include[inc].target / file
diff --git a/src/sapphire/server_map.py b/src/sapphire/server_map.py
index b0ce6191..78a58309 100644
--- a/src/sapphire/server_map.py
+++ b/src/sapphire/server_map.py
@@ -88,8 +88,8 @@ def set_dynamic_response(
        if not isinstance(mime_type, str):
            raise TypeError("mime_type must be of type 'str'")
        if url in self.include or url in self.redirect:
-            raise MapCollisionError(f"URL collision on {url!r}")
-        LOG.debug("mapping dynamic response %r -> %r (%r)", url, callback, mime_type)
+            raise MapCollisionError(f"URL collision on '{url}'")
+        LOG.debug("mapping dynamic response '%s' -> %r (%s)", url, callback, mime_type)
        self.dynamic[url] = DynamicResource(url, required, callback, mime_type)

    def set_include(self, url: str, target: Path) -> None:
@@ -97,7 +97,7 @@ def set_include(self, url: str, target: Path) -> None:
        if not target.is_dir():
            raise FileNotFoundError(f"Include path not found: {target}")
        if url in self.dynamic or url in self.redirect:
-            raise MapCollisionError(f"URL collision on {url!r}")
+            raise MapCollisionError(f"URL collision on '{url}'")
        # sanity check to prevent mapping overlapping paths
        # Note: This was added to help map file served via includes back to
        # the files on disk. This is a temporary workaround until mapping of
@@ -107,16 +107,16 @@ def set_include(self, url: str, target: Path) -> None:
                # allow overwriting entry
                continue
            if resource.target in target.parents:
-                LOG.error("%r mapping includes path '%s'", existing_url, target)
+                LOG.error("'%s' mapping includes path '%s'", existing_url, target)
                raise MapCollisionError(
-                    f"{url!r} and {existing_url!r} include '{target}'"
+                    f"'{url}' and '{existing_url}' include '{target}'"
                )
            if target in resource.target.parents:
-                LOG.error("%r mapping includes path '%s'", url, resource.target)
+                LOG.error("'%s' mapping includes path '%s'", url, resource.target)
                raise MapCollisionError(
-                    f"{url!r} and {existing_url!r} include '{resource.target}'"
+                    f"'{url}' and '{existing_url}' include '{resource.target}'"
                )
-        LOG.debug("mapping include %r -> '%s'", url, target)
+        LOG.debug("mapping include '%s' -> '%s'", url, target)
        self.include[url] = IncludeResource(url, False, target)

    def set_redirect(self, url: str, target: str, required: bool = True) -> None:
@@ -126,5 +126,5 @@ def set_redirect(self, url: str, target: str, required: bool = True) -> None:
        if not target:
            raise TypeError("target must not be an empty string")
        if url in self.dynamic or url in self.include:
-            raise MapCollisionError(f"URL collision on {url!r}")
+            raise MapCollisionError(f"URL collision on '{url}'")
        self.redirect[url] = RedirectResource(url, required, target)
diff --git a/src/sapphire/worker.py b/src/sapphire/worker.py
index 43767350..7c34c644 100644
--- a/src/sapphire/worker.py
+++ b/src/sapphire/worker.py
@@ -168,13 +168,15 @@ def handle_request(cls, conn: socket, serv_job: Job) -> None:
                conn.sendall(
                    cls._4xx_page(405, "Method Not Allowed", serv_job.auto_close)
                )
-                LOG.debug("405 method %r (%d to go)", request.method, serv_job.pending)
+                LOG.debug(
+                    "405 method '%s' (%d to go)", request.method, serv_job.pending
+                )
                return

            # lookup resource
-            LOG.debug("lookup resource %r", request.url.path)
+            LOG.debug("lookup resource '%s'", request.url.path)
            resource = serv_job.lookup_resource(request.url.path)
-            if resource:
+            if resource is not None:
                if isinstance(resource, FileResource):
                    finish_job = serv_job.remove_pending(str(resource.target))
                elif isinstance(resource, (DynamicResource, RedirectResource)):
@@ -202,11 +204,11 @@ def handle_request(cls, conn: socket, serv_job: Job) -> None:
            elif isinstance(resource, RedirectResource):
                redirect_to = [quote(resource.target)]
                if request.url.query:
-                    LOG.debug("appending query %r", request.url.query)
+                    LOG.debug("appending query '%s'", request.url.query)
                    redirect_to.append(request.url.query)
                conn.sendall(cls._307_redirect("?".join(redirect_to)))
                LOG.debug(
-                    "307 %r -> %r (%d to go)",
+                    "307 '%s' -> '%s' (%d to go)",
                    request.url.path,
                    resource.target,
                    serv_job.pending,
@@ -215,12 +217,12 @@ def handle_request(cls, conn: socket, serv_job: Job) -> None:
                # pass query string to callback
                data = resource.target(request.url.query)
                if not isinstance(data, bytes):
-                    LOG.debug("dynamic request: %r", request.url.path)
+                    LOG.debug("dynamic request: '%s'", request.url.path)
                    raise TypeError("dynamic request callback must return 'bytes'")
                conn.sendall(cls._200_header(len(data), resource.mime))
                conn.sendall(data)
                LOG.debug(
-                    "200 %r - dynamic request (%d to go)",
+                    "200 '%s' - dynamic request (%d to go)",
                    request.url.path,
                    serv_job.pending,
                )
@@ -229,7 +231,7 @@ def handle_request(cls, conn: socket, serv_job: Job) -> None:
                # serve the file
                data_size = resource.target.stat().st_size
                LOG.debug(
-                    "sending: %s, %r, '%s'",
+                    "sending: %s, '%s', '%s'",
                    f"{data_size:,}B",
                    resource.mime,
                    resource.target,
@@ -240,7 +242,7 @@ def handle_request(cls, conn: socket, serv_job: Job) -> None:
                    while offset < data_size:
                        conn.sendall(in_fp.read(cls.DEFAULT_TX_SIZE))
                        offset = in_fp.tell()
-                LOG.debug("200 %r (%d to go)", request.url.path, serv_job.pending)
+                LOG.debug("200 '%s' (%d to go)", request.url.path, serv_job.pending)
                serv_job.mark_served(resource)

        except (OSError, sock_timeout):