feat: Update dataset loading logs to reflect current time
KeplerC committed Jun 23, 2024
1 parent bcfca4f commit f6e8419
Showing 1 changed file with 13 additions and 33 deletions.
46 changes: 13 additions & 33 deletions examples/dataloader/Untitled.ipynb
@@ -283,40 +283,25 @@
},
{
"cell_type": "code",
"execution_count": 45,
"execution_count": 52,
"id": "89164adb",
"metadata": {},
"outputs": [],
"source": [
"number_of_samples = 1\n",
"def iterate_dataset(loader: BaseLoader, number_of_samples):\n",
" for i, data in enumerate(loader): \n",
" list(dict(data)[\"steps\"])\n",
" if i == number_of_samples:\n",
" break"
]
},
{
"cell_type": "code",
"execution_count": 50,
"execution_count": 54,
"id": "f192e1d7",
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"I 2024-06-22 20:30:54,640 dataset_info.py:617] Load dataset info from /home/kych/datasets/berkeley_autolab_ur5/0.1.0\n",
"I 2024-06-22 20:30:54,653 reader.py:261] Creating a tf.data.Dataset reading 1 files located in folders: /home/kych/datasets/berkeley_autolab_ur5/0.1.0.\n",
"I 2024-06-22 20:30:54,703 logging_logger.py:49] Constructing tf.data.Dataset berkeley_autolab_ur5 for split train[:1], from /home/kych/datasets/berkeley_autolab_ur5/0.1.0\n"
]
}
],
"outputs": [],
"source": [
"\n",
"\n",
"rtx_loader = RTXLoader(os.path.expanduser(\"~/datasets/berkeley_autolab_ur5/0.1.0\"), split = f'train[:{number_of_samples}]')\n",
"\n",
"import pandas as pd\n",
"import pyarrow as pa\n",
"import pyarrow.parquet as pq\n",
@@ -350,12 +335,13 @@
" table = pa.Table.from_pandas(combined_df)\n",
" pq.write_table(table, output_path)\n",
"\n",
"ParquetExporter().export(rtx_loader, \"output.parquet\")\n"
"ParquetExporter().export(RTXLoader(os.path.expanduser(\"~/datasets/berkeley_autolab_ur5/0.1.0\"), split = f'train[:{number_of_samples}]')\n",
", \"output.parquet\")\n"
]
},
{
"cell_type": "code",
"execution_count": 47,
"execution_count": 55,
"id": "1d56f44d",
"metadata": {},
"outputs": [],
@@ -376,18 +362,10 @@
},
{
"cell_type": "code",
"execution_count": 48,
"execution_count": 56,
"id": "80867561",
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/home/kych/fog_x/examples/dataloader/mkv_output//output_0.mkv\n"
]
}
],
"outputs": [],
"source": [
"\n",
"exporter = MKVExporter()\n",
@@ -397,7 +375,7 @@
},
{
"cell_type": "code",
"execution_count": 49,
"execution_count": 57,
"id": "fcfc1bdb",
"metadata": {},
"outputs": [
@@ -423,11 +401,13 @@
},
{
"cell_type": "code",
"execution_count": null,
"execution_count": 58,
"id": "0822a45a",
"metadata": {},
"outputs": [],
"source": []
"source": [
"iterate_dataset(rtx_loader, number_of_samples)"
]
},
{
"cell_type": "code",
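For context, here is a minimal sketch of how the notebook cells touched by this commit fit together after the change. It is reconstructed only from the hunks visible above: the fog_x import paths and the body of ParquetExporter.export sit in collapsed hunks and are assumptions, and the MKVExporter cell is omitted because its export call is not shown in the diff.

import os

import pandas as pd
import pyarrow as pa
import pyarrow.parquet as pq

# Assumed import paths; the real imports live in a collapsed part of the notebook.
from fog_x.loader import BaseLoader, RTXLoader

number_of_samples = 1

def iterate_dataset(loader: BaseLoader, number_of_samples):
    # Materialize the "steps" of each trajectory, stopping after the requested count.
    for i, data in enumerate(loader):
        list(dict(data)["steps"])
        if i == number_of_samples:
            break

class ParquetExporter:
    # Only the last two lines of export() are visible in the diff;
    # flattening each trajectory's steps into DataFrame rows is an assumption.
    def export(self, loader, output_path):
        frames = [pd.DataFrame(list(dict(data)["steps"])) for data in loader]
        combined_df = pd.concat(frames, ignore_index=True)
        table = pa.Table.from_pandas(combined_df)
        pq.write_table(table, output_path)

# The commit inlines the RTXLoader construction into the ParquetExporter call,
# while the final cell still calls iterate_dataset(rtx_loader, ...); the sketch
# keeps a named loader so both calls resolve.
rtx_loader = RTXLoader(
    os.path.expanduser("~/datasets/berkeley_autolab_ur5/0.1.0"),
    split=f"train[:{number_of_samples}]",
)
ParquetExporter().export(rtx_loader, "output.parquet")
iterate_dataset(rtx_loader, number_of_samples)

Note the design choice visible in the diff: the loader is constructed inline for the Parquet export, but the last cell still references rtx_loader by name, so a named loader (as above) is one way to keep both cells consistent.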
