Skip to content

Commit 802a233

Browse files
yeldarbyclaude
and committed
fix(cli): fix QA issues — fork output, video error, version help, timestamps
- workflow fork: read 'workflow' key from API response (was looking for 'url') - video status: provide actionable error with hint for not-found jobs - version create: add settings JSON example in --help epilog - annotation batch/job: convert Firestore timestamps to ISO 8601 Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent cb06bd6 commit 802a233

4 files changed

Lines changed: 53 additions & 6 deletions

File tree

roboflow/cli/handlers/annotation.py

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -75,6 +75,24 @@ def _add_job(sub: argparse._SubParsersAction) -> None: # type: ignore[type-arg]
7575
job_parser.set_defaults(func=lambda args: job_parser.print_help())
7676

7777

78+
# ---------------------------------------------------------------------------
79+
# helpers
80+
# ---------------------------------------------------------------------------
81+
82+
83+
def _normalize_timestamps(obj):
84+
"""Recursively convert Firestore timestamp dicts ({"_seconds": N, "_nanoseconds": N}) to ISO 8601 strings."""
85+
from datetime import datetime, timezone
86+
87+
if isinstance(obj, dict):
88+
if "_seconds" in obj and "_nanoseconds" in obj and len(obj) == 2:
89+
return datetime.fromtimestamp(obj["_seconds"], tz=timezone.utc).isoformat()
90+
return {k: _normalize_timestamps(v) for k, v in obj.items()}
91+
if isinstance(obj, list):
92+
return [_normalize_timestamps(item) for item in obj]
93+
return obj
94+
95+
7896
# ---------------------------------------------------------------------------
7997
# handlers
8098
# ---------------------------------------------------------------------------
@@ -117,6 +135,7 @@ def _batch_list(args: argparse.Namespace) -> None:
117135
return
118136

119137
batches = data if isinstance(data, list) else data.get("batches", data)
138+
batches = _normalize_timestamps(batches)
120139

121140
table = format_table(
122141
batches if isinstance(batches, list) else [],
@@ -141,6 +160,7 @@ def _batch_get(args: argparse.Namespace) -> None:
141160
output_error(args, str(exc), exit_code=3)
142161
return
143162

163+
data = _normalize_timestamps(data)
144164
batch = data.get("batch", data) if isinstance(data, dict) else data
145165

146166
lines = []
@@ -169,6 +189,7 @@ def _job_list(args: argparse.Namespace) -> None:
169189
return
170190

171191
jobs = data if isinstance(data, list) else data.get("jobs", data)
192+
jobs = _normalize_timestamps(jobs)
172193

173194
table = format_table(
174195
jobs if isinstance(jobs, list) else [],
@@ -193,6 +214,7 @@ def _job_get(args: argparse.Namespace) -> None:
193214
output_error(args, str(exc), exit_code=3)
194215
return
195216

217+
data = _normalize_timestamps(data)
196218
job = data.get("job", data) if isinstance(data, dict) else data
197219

198220
lines = []

roboflow/cli/handlers/version.py

Lines changed: 20 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,15 @@
22

33
from __future__ import annotations
44

5+
import argparse
56
import re
6-
from typing import TYPE_CHECKING
77

8-
if TYPE_CHECKING:
9-
import argparse
8+
9+
class _RawEpilogFormatter(argparse.HelpFormatter):
10+
"""Formatter that preserves raw text in the epilog while wrapping everything else."""
11+
12+
def _fill_text(self, text: str, width: int, indent: str) -> str:
13+
return text
1014

1115

1216
def register(subparsers: argparse._SubParsersAction) -> None: # type: ignore[type-arg]
@@ -40,7 +44,19 @@ def register(subparsers: argparse._SubParsersAction) -> None: # type: ignore[ty
4044
export_parser.set_defaults(func=_export)
4145

4246
# --- version create ---
43-
create_parser = version_subs.add_parser("create", help="Create a new dataset version")
47+
create_parser = version_subs.add_parser(
48+
"create",
49+
help="Create a new dataset version",
50+
epilog=(
51+
"Settings JSON example:\n"
52+
' {"augmentation": {"flip": {"horizontal": true, "vertical": false},\n'
53+
' "rotate": {"degrees": 15}, "brightness": {"percent": 25}},\n'
54+
' "preprocessing": {"auto-orient": true, "resize": {"width": 640,\n'
55+
' "height": 640, "format": "Stretch to"}}}\n\n'
56+
"See https://docs.roboflow.com/datasets/create-a-dataset-version for all options."
57+
),
58+
formatter_class=_RawEpilogFormatter,
59+
)
4460
create_parser.add_argument("-p", "--project", dest="project", required=True, help="Project ID")
4561
create_parser.add_argument(
4662
"--settings", dest="settings", required=True, help="Path to JSON file with augmentation/preprocessing config"

roboflow/cli/handlers/video.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -72,7 +72,16 @@ def _video_status(args: argparse.Namespace) -> None:
7272
try:
7373
data = rfapi.get_video_job_status(api_key, args.job_id)
7474
except rfapi.RoboflowError as exc:
75-
output_error(args, str(exc), exit_code=3)
75+
msg = str(exc)
76+
if "NOT FOUND" in msg.upper():
77+
output_error(
78+
args,
79+
f"Video job '{args.job_id}' not found.",
80+
hint="Run 'roboflow video infer' to start a video job.",
81+
exit_code=3,
82+
)
83+
else:
84+
output_error(args, msg, exit_code=3)
7685
return
7786

7887
status = data.get("status", "unknown")

roboflow/cli/handlers/workflow.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -331,7 +331,7 @@ def _fork_workflow(args: argparse.Namespace) -> None:
331331
output_error(args, str(exc))
332332
return
333333

334-
new_url = data.get("url", data.get("workflow_url", "")) if isinstance(data, dict) else ""
334+
new_url = data.get("workflow", data.get("url", data.get("workflow_url", ""))) if isinstance(data, dict) else ""
335335
result = {"status": "forked", "source": args.workflow_url, "new_url": new_url}
336336
text = f"Forked workflow: {args.workflow_url} -> {new_url}"
337337
output(args, result, text=text)

0 commit comments

Comments
 (0)