diff --git a/notebooks/pytorch/pytorchtext.ipynb b/notebooks/pytorch/pytorchtext.ipynb
index ef57191..2ed3911 100644
--- a/notebooks/pytorch/pytorchtext.ipynb
+++ b/notebooks/pytorch/pytorchtext.ipynb
@@ -8,7 +8,8 @@
"collapsed_sections": [
"qSuUpkj1UuUa"
],
- "toc_visible": true
+ "toc_visible": true,
+ "authorship_tag": "ABX9TyPo11ThkRGyvDnABBBLZ335"
},
"kernelspec": {
"name": "python3",
@@ -16,7 +17,7 @@
},
"widgets": {
"application/vnd.jupyter.widget-state+json": {
- "4b0c2f038a0d49f4b50ef41121787e67": {
+ "dad0fde1bb234e199ecb90fa8d7121f7": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
@@ -28,15 +29,15 @@
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
- "layout": "IPY_MODEL_ddc16dcb642d4f6887dadb4133a35218",
+ "layout": "IPY_MODEL_b744182028cb49b8b9487b223154c17f",
"_model_module": "@jupyter-widgets/controls",
"children": [
- "IPY_MODEL_048b7c2faaac41c9bb84097dad3522f5",
- "IPY_MODEL_46416a2f4968461fb1659fe5bc1deda6"
+ "IPY_MODEL_4b37a43a1a2a484e81bd846528d0e869",
+ "IPY_MODEL_56c403126c184ab285e8f8c32913adb3"
]
}
},
- "ddc16dcb642d4f6887dadb4133a35218": {
+ "b744182028cb49b8b9487b223154c17f": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
@@ -87,50 +88,50 @@
"left": null
}
},
- "048b7c2faaac41c9bb84097dad3522f5": {
+ "4b37a43a1a2a484e81bd846528d0e869": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
- "style": "IPY_MODEL_5d97afc63ee644d498651fb937d7e724",
+ "style": "IPY_MODEL_463174bcce1d426f8c782a73be06542f",
"_dom_classes": [],
- "description": "100%",
+ "description": "pos Files: 100%",
"_model_name": "FloatProgressModel",
"bar_style": "success",
- "max": 2,
+ "max": 12500,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
- "value": 2,
+ "value": 12500,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
- "layout": "IPY_MODEL_0feed71a59cc487383ee28d68fa44913"
+ "layout": "IPY_MODEL_2cf03d65545d4ff38ce1529d8830f6e1"
}
},
- "46416a2f4968461fb1659fe5bc1deda6": {
+ "56c403126c184ab285e8f8c32913adb3": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
- "style": "IPY_MODEL_bfb3a3d1264e4dd086f0a5e276ce25cb",
+ "style": "IPY_MODEL_e66c98f8caf94f23828823a8a8e1a3d9",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
- "value": " 2/2 [00:39<00:00, 19.80s/it]",
+ "value": " 12500/12500 [14:32<00:00, 14.33it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
- "layout": "IPY_MODEL_0017ead47fe9489dbb1569732e811d37"
+ "layout": "IPY_MODEL_21eab334c3d6493b8529d8a748f07985"
}
},
- "5d97afc63ee644d498651fb937d7e724": {
+ "463174bcce1d426f8c782a73be06542f": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
@@ -145,7 +146,7 @@
"_model_module": "@jupyter-widgets/controls"
}
},
- "0feed71a59cc487383ee28d68fa44913": {
+ "2cf03d65545d4ff38ce1529d8830f6e1": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
@@ -196,7 +197,7 @@
"left": null
}
},
- "bfb3a3d1264e4dd086f0a5e276ce25cb": {
+ "e66c98f8caf94f23828823a8a8e1a3d9": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
@@ -210,7 +211,7 @@
"_model_module": "@jupyter-widgets/controls"
}
},
- "0017ead47fe9489dbb1569732e811d37": {
+ "21eab334c3d6493b8529d8a748f07985": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
@@ -261,7 +262,7 @@
"left": null
}
},
- "1f2e5e5663ed44a9940f27328e4d51de": {
+ "30febe7dc42c4f6b83739a431e30c270": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HBoxModel",
"state": {
@@ -273,15 +274,15 @@
"_view_count": null,
"_view_module_version": "1.5.0",
"box_style": "",
- "layout": "IPY_MODEL_e082f6515fb1430da7a8f0f416120f1a",
+ "layout": "IPY_MODEL_cd437d138e494933ba6d10a30e0ed67a",
"_model_module": "@jupyter-widgets/controls",
"children": [
- "IPY_MODEL_d079c5d500a84d7c8871fd333c0ea4cd",
- "IPY_MODEL_c44d83db9f1b45cca5f8310f17d5a126"
+ "IPY_MODEL_bdbd4b6d1fb44512b65122ac4796158b",
+ "IPY_MODEL_e5023ddea8cc4c98ba36b2e699679e06"
]
}
},
- "e082f6515fb1430da7a8f0f416120f1a": {
+ "cd437d138e494933ba6d10a30e0ed67a": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
@@ -332,50 +333,50 @@
"left": null
}
},
- "d079c5d500a84d7c8871fd333c0ea4cd": {
+ "bdbd4b6d1fb44512b65122ac4796158b": {
"model_module": "@jupyter-widgets/controls",
"model_name": "FloatProgressModel",
"state": {
"_view_name": "ProgressView",
- "style": "IPY_MODEL_c28d141d1bd64acdba79ebcb19ea2b0d",
+ "style": "IPY_MODEL_66c74197bf53429099167c5805bd8dfb",
"_dom_classes": [],
- "description": "100%",
+ "description": "neg Files: 100%",
"_model_name": "FloatProgressModel",
"bar_style": "success",
- "max": 2,
+ "max": 12500,
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
- "value": 2,
+ "value": 12500,
"_view_count": null,
"_view_module_version": "1.5.0",
"orientation": "horizontal",
"min": 0,
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
- "layout": "IPY_MODEL_39cda08266884d86ab436abb394f7284"
+ "layout": "IPY_MODEL_e7b2214d33fd48989131f7050cb8f34f"
}
},
- "c44d83db9f1b45cca5f8310f17d5a126": {
+ "e5023ddea8cc4c98ba36b2e699679e06": {
"model_module": "@jupyter-widgets/controls",
"model_name": "HTMLModel",
"state": {
"_view_name": "HTMLView",
- "style": "IPY_MODEL_ef7096f4b2894dfcbd8e84ae10b8008c",
+ "style": "IPY_MODEL_1c59c6b29340469587c2e910dcaf50c6",
"_dom_classes": [],
"description": "",
"_model_name": "HTMLModel",
"placeholder": "",
"_view_module": "@jupyter-widgets/controls",
"_model_module_version": "1.5.0",
- "value": " 2/2 [00:19<00:00, 9.97s/it]",
+ "value": " 12500/12500 [14:18<00:00, 14.55it/s]",
"_view_count": null,
"_view_module_version": "1.5.0",
"description_tooltip": null,
"_model_module": "@jupyter-widgets/controls",
- "layout": "IPY_MODEL_da3c41fdd92348e39d83e778dfc847e0"
+ "layout": "IPY_MODEL_e37cf4e14300482581f9d38947933821"
}
},
- "c28d141d1bd64acdba79ebcb19ea2b0d": {
+ "66c74197bf53429099167c5805bd8dfb": {
"model_module": "@jupyter-widgets/controls",
"model_name": "ProgressStyleModel",
"state": {
@@ -390,7 +391,7 @@
"_model_module": "@jupyter-widgets/controls"
}
},
- "39cda08266884d86ab436abb394f7284": {
+ "e7b2214d33fd48989131f7050cb8f34f": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
@@ -441,7 +442,7 @@
"left": null
}
},
- "ef7096f4b2894dfcbd8e84ae10b8008c": {
+ "1c59c6b29340469587c2e910dcaf50c6": {
"model_module": "@jupyter-widgets/controls",
"model_name": "DescriptionStyleModel",
"state": {
@@ -455,7 +456,1477 @@
"_model_module": "@jupyter-widgets/controls"
}
},
- "da3c41fdd92348e39d83e778dfc847e0": {
+ "e37cf4e14300482581f9d38947933821": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "1bf8f9f8bce640c1bebb18287571513f": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HBoxModel",
+ "state": {
+ "_view_name": "HBoxView",
+ "_dom_classes": [],
+ "_model_name": "HBoxModel",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "box_style": "",
+ "layout": "IPY_MODEL_6e4c7b7b5a85429293ec6fcab2d7c9a8",
+ "_model_module": "@jupyter-widgets/controls",
+ "children": [
+ "IPY_MODEL_1fed350aa59b4c7ea25df0f3e82a3067",
+ "IPY_MODEL_ffde509700004f658d5d86fdc861c05e"
+ ]
+ }
+ },
+ "6e4c7b7b5a85429293ec6fcab2d7c9a8": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "1fed350aa59b4c7ea25df0f3e82a3067": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_view_name": "ProgressView",
+ "style": "IPY_MODEL_93db5c1cd89f44ea9374efc63c069a9c",
+ "_dom_classes": [],
+ "description": "pos Files: 100%",
+ "_model_name": "FloatProgressModel",
+ "bar_style": "success",
+ "max": 12500,
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": 12500,
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "orientation": "horizontal",
+ "min": 0,
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_9b684c37c06a47a1860a1ada542f7a66"
+ }
+ },
+ "ffde509700004f658d5d86fdc861c05e": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HTMLModel",
+ "state": {
+ "_view_name": "HTMLView",
+ "style": "IPY_MODEL_7dcb325407834d97985ad48033279a61",
+ "_dom_classes": [],
+ "description": "",
+ "_model_name": "HTMLModel",
+ "placeholder": "",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": " 12500/12500 [14:05<00:00, 14.78it/s]",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_825dc452fcbd492d83520b74f43a637a"
+ }
+ },
+ "93db5c1cd89f44ea9374efc63c069a9c": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "ProgressStyleModel",
+ "description_width": "initial",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "bar_color": null,
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "9b684c37c06a47a1860a1ada542f7a66": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "7dcb325407834d97985ad48033279a61": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "DescriptionStyleModel",
+ "description_width": "",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "825dc452fcbd492d83520b74f43a637a": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "5dedadf8a8164dc891262a15d1c449ae": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HBoxModel",
+ "state": {
+ "_view_name": "HBoxView",
+ "_dom_classes": [],
+ "_model_name": "HBoxModel",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "box_style": "",
+ "layout": "IPY_MODEL_5255f81b15d64273a2516cef83f09a1f",
+ "_model_module": "@jupyter-widgets/controls",
+ "children": [
+ "IPY_MODEL_3d1f93b24f5a4d25846a624981b388f9",
+ "IPY_MODEL_580be97d9d4b40859ad2293b10d56998"
+ ]
+ }
+ },
+ "5255f81b15d64273a2516cef83f09a1f": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "3d1f93b24f5a4d25846a624981b388f9": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_view_name": "ProgressView",
+ "style": "IPY_MODEL_45be9b36c2474d45a3d238a8f831b05f",
+ "_dom_classes": [],
+ "description": "neg Files: 100%",
+ "_model_name": "FloatProgressModel",
+ "bar_style": "success",
+ "max": 12500,
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": 12500,
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "orientation": "horizontal",
+ "min": 0,
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_ff4c12f0396e489fa1803482ba46a95f"
+ }
+ },
+ "580be97d9d4b40859ad2293b10d56998": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HTMLModel",
+ "state": {
+ "_view_name": "HTMLView",
+ "style": "IPY_MODEL_cf1451d6f5c14335baf75f351952ef05",
+ "_dom_classes": [],
+ "description": "",
+ "_model_name": "HTMLModel",
+ "placeholder": "",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": " 12500/12500 [13:53<00:00, 15.00it/s]",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_851504272d754ca19dd932fce175cc1c"
+ }
+ },
+ "45be9b36c2474d45a3d238a8f831b05f": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "ProgressStyleModel",
+ "description_width": "initial",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "bar_color": null,
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "ff4c12f0396e489fa1803482ba46a95f": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "cf1451d6f5c14335baf75f351952ef05": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "DescriptionStyleModel",
+ "description_width": "",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "851504272d754ca19dd932fce175cc1c": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "da09e310129e4851a616f4bf5e964fbf": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HBoxModel",
+ "state": {
+ "_view_name": "HBoxView",
+ "_dom_classes": [],
+ "_model_name": "HBoxModel",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "box_style": "",
+ "layout": "IPY_MODEL_ae72e62369234928a7dd35bd6a3b5ad8",
+ "_model_module": "@jupyter-widgets/controls",
+ "children": [
+ "IPY_MODEL_8f43d50717784a4e8d94ad781c5ed347",
+ "IPY_MODEL_9f0048d3c1234194a119cde8346a16a7"
+ ]
+ }
+ },
+ "ae72e62369234928a7dd35bd6a3b5ad8": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "8f43d50717784a4e8d94ad781c5ed347": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_view_name": "ProgressView",
+ "style": "IPY_MODEL_e9b15e66116248ddb39e1bde4db47715",
+ "_dom_classes": [],
+ "description": "pos Files: 100%",
+ "_model_name": "FloatProgressModel",
+ "bar_style": "success",
+ "max": 12500,
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": 12500,
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "orientation": "horizontal",
+ "min": 0,
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_dcc77a039e624a13bd0d338f8745cf73"
+ }
+ },
+ "9f0048d3c1234194a119cde8346a16a7": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HTMLModel",
+ "state": {
+ "_view_name": "HTMLView",
+ "style": "IPY_MODEL_1face3ee08304f399779db8848234ddf",
+ "_dom_classes": [],
+ "description": "",
+ "_model_name": "HTMLModel",
+ "placeholder": "",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": " 12500/12500 [00:39<00:00, 315.19it/s]",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_fa609bb4d84742e1916d4edf038a4360"
+ }
+ },
+ "e9b15e66116248ddb39e1bde4db47715": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "ProgressStyleModel",
+ "description_width": "initial",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "bar_color": null,
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "dcc77a039e624a13bd0d338f8745cf73": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "1face3ee08304f399779db8848234ddf": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "DescriptionStyleModel",
+ "description_width": "",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "fa609bb4d84742e1916d4edf038a4360": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "fce2d470c5904ea0af824659d3a62be1": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HBoxModel",
+ "state": {
+ "_view_name": "HBoxView",
+ "_dom_classes": [],
+ "_model_name": "HBoxModel",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "box_style": "",
+ "layout": "IPY_MODEL_ba0eacfa389942c0be0682e0e2ec1a1a",
+ "_model_module": "@jupyter-widgets/controls",
+ "children": [
+ "IPY_MODEL_53f7affef6a0470eb1a00ab92b7f32d6",
+ "IPY_MODEL_4da5c0b42df44d98bd49caee01a4b13a"
+ ]
+ }
+ },
+ "ba0eacfa389942c0be0682e0e2ec1a1a": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "53f7affef6a0470eb1a00ab92b7f32d6": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_view_name": "ProgressView",
+ "style": "IPY_MODEL_e547697e00e345b69e4f115f262859ee",
+ "_dom_classes": [],
+ "description": "neg Files: 100%",
+ "_model_name": "FloatProgressModel",
+ "bar_style": "success",
+ "max": 12500,
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": 12500,
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "orientation": "horizontal",
+ "min": 0,
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_74e8ef5137d44038b4d7fc3b2b374a9d"
+ }
+ },
+ "4da5c0b42df44d98bd49caee01a4b13a": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HTMLModel",
+ "state": {
+ "_view_name": "HTMLView",
+ "style": "IPY_MODEL_08992322ed48407b98dead3c437508fd",
+ "_dom_classes": [],
+ "description": "",
+ "_model_name": "HTMLModel",
+ "placeholder": "",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": " 12500/12500 [00:26<00:00, 467.47it/s]",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_aa74337e6df7456689b806fec4a93f3d"
+ }
+ },
+ "e547697e00e345b69e4f115f262859ee": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "ProgressStyleModel",
+ "description_width": "initial",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "bar_color": null,
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "74e8ef5137d44038b4d7fc3b2b374a9d": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "08992322ed48407b98dead3c437508fd": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "DescriptionStyleModel",
+ "description_width": "",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "aa74337e6df7456689b806fec4a93f3d": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "759cba0a404d489dbb184e4fb200c280": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HBoxModel",
+ "state": {
+ "_view_name": "HBoxView",
+ "_dom_classes": [],
+ "_model_name": "HBoxModel",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "box_style": "",
+ "layout": "IPY_MODEL_0bbbf8dea24b4db99502ff03fe4deca8",
+ "_model_module": "@jupyter-widgets/controls",
+ "children": [
+ "IPY_MODEL_7516526db49f4a8dade530a88d56a451",
+ "IPY_MODEL_95e2edba30344f5b90c822ec500e91a0"
+ ]
+ }
+ },
+ "0bbbf8dea24b4db99502ff03fe4deca8": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "7516526db49f4a8dade530a88d56a451": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_view_name": "ProgressView",
+ "style": "IPY_MODEL_10357831f4ef483d8bebedd302994dba",
+ "_dom_classes": [],
+ "description": "pos Files: 100%",
+ "_model_name": "FloatProgressModel",
+ "bar_style": "success",
+ "max": 12500,
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": 12500,
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "orientation": "horizontal",
+ "min": 0,
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_5f9e91ed3df64991b018121c6990b343"
+ }
+ },
+ "95e2edba30344f5b90c822ec500e91a0": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HTMLModel",
+ "state": {
+ "_view_name": "HTMLView",
+ "style": "IPY_MODEL_d54e99ab3a0849fb984f6d20f3113e5d",
+ "_dom_classes": [],
+ "description": "",
+ "_model_name": "HTMLModel",
+ "placeholder": "",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": " 12500/12500 [00:14<00:00, 871.13it/s]",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_b7036eb8d6e84347badd2a871b83d8f0"
+ }
+ },
+ "10357831f4ef483d8bebedd302994dba": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "ProgressStyleModel",
+ "description_width": "initial",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "bar_color": null,
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "5f9e91ed3df64991b018121c6990b343": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "d54e99ab3a0849fb984f6d20f3113e5d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "DescriptionStyleModel",
+ "description_width": "",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "b7036eb8d6e84347badd2a871b83d8f0": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "20b455acca3c4a5295ee96677cad8a21": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HBoxModel",
+ "state": {
+ "_view_name": "HBoxView",
+ "_dom_classes": [],
+ "_model_name": "HBoxModel",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "box_style": "",
+ "layout": "IPY_MODEL_6d1902fcd23b42a4b1ed9ed245925649",
+ "_model_module": "@jupyter-widgets/controls",
+ "children": [
+ "IPY_MODEL_8708969a67674267b8081e446380197d",
+ "IPY_MODEL_1ec07bd0793e4ff2a579b6b2be4274b8"
+ ]
+ }
+ },
+ "6d1902fcd23b42a4b1ed9ed245925649": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "8708969a67674267b8081e446380197d": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "FloatProgressModel",
+ "state": {
+ "_view_name": "ProgressView",
+ "style": "IPY_MODEL_59ee69d5f72f473ebffabf3f5b520544",
+ "_dom_classes": [],
+ "description": "neg Files: 100%",
+ "_model_name": "FloatProgressModel",
+ "bar_style": "success",
+ "max": 12500,
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": 12500,
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "orientation": "horizontal",
+ "min": 0,
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_097800a7c8864a64927d0302c58ec47b"
+ }
+ },
+ "1ec07bd0793e4ff2a579b6b2be4274b8": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "HTMLModel",
+ "state": {
+ "_view_name": "HTMLView",
+ "style": "IPY_MODEL_b4e9b60afb0047a8aba77c36236ae783",
+ "_dom_classes": [],
+ "description": "",
+ "_model_name": "HTMLModel",
+ "placeholder": "",
+ "_view_module": "@jupyter-widgets/controls",
+ "_model_module_version": "1.5.0",
+ "value": " 12500/12500 [01:42<00:00, 121.69it/s]",
+ "_view_count": null,
+ "_view_module_version": "1.5.0",
+ "description_tooltip": null,
+ "_model_module": "@jupyter-widgets/controls",
+ "layout": "IPY_MODEL_39270b7674c042b9b51499492b30a915"
+ }
+ },
+ "59ee69d5f72f473ebffabf3f5b520544": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "ProgressStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "ProgressStyleModel",
+ "description_width": "initial",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "bar_color": null,
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "097800a7c8864a64927d0302c58ec47b": {
+ "model_module": "@jupyter-widgets/base",
+ "model_name": "LayoutModel",
+ "state": {
+ "_view_name": "LayoutView",
+ "grid_template_rows": null,
+ "right": null,
+ "justify_content": null,
+ "_view_module": "@jupyter-widgets/base",
+ "overflow": null,
+ "_model_module_version": "1.2.0",
+ "_view_count": null,
+ "flex_flow": null,
+ "width": null,
+ "min_width": null,
+ "border": null,
+ "align_items": null,
+ "bottom": null,
+ "_model_module": "@jupyter-widgets/base",
+ "top": null,
+ "grid_column": null,
+ "overflow_y": null,
+ "overflow_x": null,
+ "grid_auto_flow": null,
+ "grid_area": null,
+ "grid_template_columns": null,
+ "flex": null,
+ "_model_name": "LayoutModel",
+ "justify_items": null,
+ "grid_row": null,
+ "max_height": null,
+ "align_content": null,
+ "visibility": null,
+ "align_self": null,
+ "height": null,
+ "min_height": null,
+ "padding": null,
+ "grid_auto_rows": null,
+ "grid_gap": null,
+ "max_width": null,
+ "order": null,
+ "_view_module_version": "1.2.0",
+ "grid_template_areas": null,
+ "object_position": null,
+ "object_fit": null,
+ "grid_auto_columns": null,
+ "margin": null,
+ "display": null,
+ "left": null
+ }
+ },
+ "b4e9b60afb0047a8aba77c36236ae783": {
+ "model_module": "@jupyter-widgets/controls",
+ "model_name": "DescriptionStyleModel",
+ "state": {
+ "_view_name": "StyleView",
+ "_model_name": "DescriptionStyleModel",
+ "description_width": "",
+ "_view_module": "@jupyter-widgets/base",
+ "_model_module_version": "1.5.0",
+ "_view_count": null,
+ "_view_module_version": "1.2.0",
+ "_model_module": "@jupyter-widgets/controls"
+ }
+ },
+ "39270b7674c042b9b51499492b30a915": {
"model_module": "@jupyter-widgets/base",
"model_name": "LayoutModel",
"state": {
@@ -513,8 +1984,7 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "qSuUpkj1UuUa",
- "colab_type": "text"
+ "id": "qSuUpkj1UuUa"
},
"source": [
"##### © Copyright 2020 [George Mihaila](https://github.com/gmihaila).\n",
@@ -525,9 +1995,7 @@
{
"cell_type": "code",
"metadata": {
- "id": "FkXPZsPbT2aV",
- "colab_type": "code",
- "colab": {}
+ "id": "FkXPZsPbT2aV"
},
"source": [
"#@title Licensed under the Apache License, Version 2.0 (the \"License\");\n",
@@ -548,43 +2016,56 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "aP1zAHX4S70e",
- "colab_type": "text"
+ "id": "aP1zAHX4S70e"
},
"source": [
- "# PyTorchText \n",
+ "# **Better Batches with PyTorchText BucketIterator**\n",
+ "\n",
+ "## **How to use PyTorchText BucketIterator to sort text data for better batching.**\n",
+ "\n",
+        "[](https://colab.research.google.com/github/gmihaila/ml_things/blob/master/notebooks/pytorch/pytorchtext.ipynb) \n",
+        "[](https://github.com/gmihaila/ml_things/blob/master/notebooks/pytorch/pytorchtext.ipynb)\n",
+ "[](https://medium.com/@gmihaila/fine-tune-transformers-in-pytorch-using-transformers-57b40450635)\n",
+ "[](https://opensource.org/licenses/Apache-2.0)\n",
"\n",
- "## Example on how to batch text sequences with **BucketIterator**\n",
"\n",
"
\n",
"\n",
- "\n",
- "\n",
- "[](https://colab.research.google.com/github/gmihaila/ml_things/blob/master/notebooks/pytorch/pytorchtext.ipynb) \n",
- "[](https://github.com/gmihaila/ml_things/blob/master/notebooks/pytorch/pytorchtext.ipynb)\n",
+        "**Disclaimer:** *The format of this tutorial notebook is very similar to my other tutorial notebooks. This is done intentionally in order to keep readers familiar with my format.*\n",
"\n",
"
\n",
"\n",
+        "This notebook is a simple tutorial on how to use the powerful **PyTorchText** **BucketIterator** functionality to group examples of similar lengths. This allows us to use an optimal batching method when training models on text data.\n",
"\n",
- "This notebook is an example of using **pytorchtext** powerful **BucketIterator** function which allows grouping examples of similar lengths to provide the most optimal batching method.\n",
+        "Grouping batches this way provides a lot of gain for recurrent models (RNN, GRU, LSTM) and transformer models (BERT, RoBERTa, GPT-2, XLNet, etc.) - basically any model that takes variable-length text sequences as input.\n",
"\n",
- "The batching problem provides a lot of gain for recurrent models (RNN, GRU, LSTM) and transformers models (bert, roBerta, gpt2, xlnet, etc.). Basically any model that can deal with variable input batches.\n",
+ "**I will not train any models in this notebook!** I will release a tutorial where I use this implementation to train a transformer model.\n",
"\n",
- "I will not train any model in this notebook. There are other notebooks where I use this batching method to train models.\n",
+        "The purpose is to take an example text dataset, batch it using **PyTorchText** with **BucketIterator**, and show how it groups text sequences of similar length in batches.\n",
"\n",
- "The purpose is to use an example datasets and batch it using **torchtext** with **BucketIterator** and show how it groups text sequences of similar length in batches.\n",
+ "This tutorial has two main parts:\n",
+ "\n",
+        "* **Using PyTorch Dataset with PyTorchText Bucket Iterator**: Here I implement a standard PyTorch Dataset class that reads in a text dataset, and use the PyTorchText BucketIterator to group examples of similar length into the same batches. I want to show how easy it is to use this powerful functionality from PyTorchText in a regular PyTorch Dataset workflow.\n",
+ "\n",
+        "* **Using PyTorchText TabularDataset with PyTorchText Bucket Iterator**: Here I use the built-in PyTorchText TabularDataset, which reads data straight from local files without the need to create a PyTorch Dataset class.\n",
+ "\n",
+        "*This notebook is a code adaptation and implementation inspired by a few sources:* [torchtext_translation_tutorial](https://pytorch.org/tutorials/beginner/torchtext_translation_tutorial.html), [pytorch/text - GitHub](https://github.com/pytorch/text), [torchtext documentation](https://torchtext.readthedocs.io/en/latest/index.html#) and [A Comprehensive Introduction to Torchtext](https://mlexplained.com/2018/02/08/a-comprehensive-tutorial-to-torchtext/).\n",
"\n",
"
\n",
"\n",
- "## How to use this notebook? \n",
+ "## **What should I know for this notebook?**\n",
"\n",
- "I am using the [Large Movie Review Dataset v1.0](http://ai.stanford.edu/~amaas/data/sentiment/) dataset which contains **positive sentiments** and **negative sentiments** of movie review. This dataset requires using *Supervised Training* with *Binary Classification*.\n",
+        "Some basic PyTorch knowledge of the Dataset class and DataLoaders. Some knowledge of PyTorchText is helpful but not critical for understanding this tutorial. Applying the BucketIterator is similar to applying a DataLoader to a PyTorch Dataset.\n",
"\n",
- "The code is made with reusability in mind. It can be easily adapted for other text datasets and other NLP tasks. \n",
+        "<br>\n",
+ "\n",
+ "## **How to use this notebook?**\n",
+ "\n",
+ "The code is made with reusability in mind. It can be easily adapted for other text datasets and other NLP tasks in order to achieve optimal batching. \n",
"\n",
"Comments should provide enough guidance to easily adapt this notebook to your needs.\n",
"\n",
- "This code is designed mostly for **classification tasks** in mind, but it can be adapted for **dialogue generation tasks**.\n",
+        "This code is designed mostly with **classification tasks** in mind, but it can be adapted for any other Natural Language Processing task where batching text data is needed.\n",
"\n",
"\n",
"\n",
@@ -593,20 +2074,37 @@
"\n",
"
\n",
"\n",
- "## Notes:\n",
- " * This notebooks is a code adaptation of a few sources I foudn online: [torchtext_translation_tutorial](https://pytorch.org/tutorials/beginner/torchtext_translation_tutorial.html), [pytorch/text - GitHub](https://github.com/pytorch/text), [torchtext documentation](https://torchtext.readthedocs.io/en/latest/index.html#) and [A Comprehensive Introduction to Torchtext](https://mlexplained.com/2018/02/08/a-comprehensive-tutorial-to-torchtext/).\n",
"\n",
- "\n"
+ "## **Dataset**\n",
+ "\n",
+        "This notebook uses the well-known [Large Movie Review Dataset](https://ai.stanford.edu/~amaas/data/sentiment/) of movie reviews labeled as positive or negative.\n",
+ "\n",
+ "The description provided on the Stanford website:\n",
+ "\n",
+ "*This is a dataset for binary sentiment classification containing substantially more data than previous benchmark datasets. We provide a set of 25,000 highly polar movie reviews for training, and 25,000 for testing. There is additional unlabeled data for use as well. Raw text and already processed bag of words formats are provided. See the README file contained in the release for more details.*\n",
+ "\n",
+        "**Why this dataset?** I believe it is an easy dataset to understand and use for classification. I think sentiment data is always fun to work with.\n",
+ "\n",
+        "<br>\n",
+ "\n",
+ "## **Coding**\n",
+ "\n",
+        "Now let's do some coding! We will go through each code cell in the notebook, describe what it does, show the code, and, when relevant, show the output.\n",
+ "\n",
+        "I made this format easy to follow if you decide to run each code cell in your own Python notebook.\n",
+ "\n",
+ "When I learn from a tutorial I always try to replicate the results. I believe it's easy to follow along if you have the code next to the explanations.\n",
+ "\n",
+        "<br>\n"
]
},
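To make the idea above concrete before stepping through the notebook cells, here is a minimal, self-contained Python sketch (not part of the notebook) of what grouping by length buys us: sort the sequences, batch neighbours together, and pad only up to the longest sequence in each batch. The toy token-id sequences and the batch size are made up for illustration; the notebook itself achieves the same effect with PyTorchText's BucketIterator.

```python
import torch

# Toy, made-up token-id sequences of varying length.
toy_sequences = [[1, 2, 3], [4, 5], [6, 7, 8, 9, 10], [11], [12, 13, 14, 15], [16, 17]]
batch_size = 2

# Sort by length so neighbouring examples end up with similar lengths.
sorted_seqs = sorted(toy_sequences, key=len)

batches = []
for start in range(0, len(sorted_seqs), batch_size):
    chunk = sorted_seqs[start:start + batch_size]
    longest = max(len(seq) for seq in chunk)
    # Pad each sequence only up to the longest sequence in *this* batch,
    # not the longest sequence in the whole dataset.
    padded = [seq + [0] * (longest - len(seq)) for seq in chunk]
    batches.append(torch.tensor(padded))

for batch in batches:
    print(batch.shape)
```

Printing the shapes shows that each batch is only as wide as its own longest sequence, which is exactly what BucketIterator automates on real data.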
{
"cell_type": "markdown",
"metadata": {
- "id": "8ppW60cUXZQK",
- "colab_type": "text"
+ "id": "8ppW60cUXZQK"
},
"source": [
- "# Downloads\n",
+        "## **Downloads**\n",
"\n",
"Download the IMDB Movie Reviews sentiment dataset and unzip it locally."
]
@@ -614,9 +2112,7 @@
{
"cell_type": "code",
"metadata": {
- "id": "6l_gehghXapy",
- "colab_type": "code",
- "colab": {}
+ "id": "6l_gehghXapy"
},
"source": [
"# download the dataset\n",
@@ -630,39 +2126,35 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "UCLtm5BiXona",
- "colab_type": "text"
+ "id": "UCLtm5BiXona"
},
"source": [
- "# Installs\n",
+ "## **Installs**\n",
"\n",
- "I will use [ftfy](https://ftfy.readthedocs.io/en/latest/) to fix any bad Unicode there might be in the text data files. \n",
- "\n",
- "Since we don't know for sure of anything is wrong with the text files its safer to run all text through ftfy."
+ "* **[ml_things](https://github.com/gmihaila/ml_things)** library used for various machine learning related tasks. I created this library to reduce the amount of code I need to write for each machine learning project.\n"
]
},
{
"cell_type": "code",
"metadata": {
"id": "1JQhmThRXp7b",
- "colab_type": "code",
+ "outputId": "b2c393ed-71e2-4db1-9389-bc85c2ca02ee",
"colab": {
- "base_uri": "https://localhost:8080/",
- "height": 50
- },
- "outputId": "8f0d69ed-c4cb-4d6c-fea3-d186580fa05a"
+ "base_uri": "https://localhost:8080/"
+ }
},
"source": [
- "# install ftfy to fix any text encoding issues\n",
- "!pip install -q ftfy"
+ "# Install helper functions.\n",
+ "!pip install -q git+https://github.com/gmihaila/ml_things.git"
],
"execution_count": 3,
"outputs": [
{
"output_type": "stream",
"text": [
- "\u001b[?25l\r\u001b[K |█████▏ | 10kB 17.5MB/s eta 0:00:01\r\u001b[K |██████████▎ | 20kB 1.5MB/s eta 0:00:01\r\u001b[K |███████████████▍ | 30kB 1.8MB/s eta 0:00:01\r\u001b[K |████████████████████▌ | 40kB 2.1MB/s eta 0:00:01\r\u001b[K |█████████████████████████▋ | 51kB 1.9MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▊ | 61kB 2.1MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 71kB 1.9MB/s \n",
- "\u001b[?25h Building wheel for ftfy (setup.py) ... \u001b[?25l\u001b[?25hdone\n"
+ "\u001b[?25l\r\u001b[K |█████▏ | 10kB 21.1MB/s eta 0:00:01\r\u001b[K |██████████▎ | 20kB 24.2MB/s eta 0:00:01\r\u001b[K |███████████████▍ | 30kB 14.8MB/s eta 0:00:01\r\u001b[K |████████████████████▌ | 40kB 11.6MB/s eta 0:00:01\r\u001b[K |█████████████████████████▋ | 51kB 5.5MB/s eta 0:00:01\r\u001b[K |██████████████████████████████▊ | 61kB 6.2MB/s eta 0:00:01\r\u001b[K |████████████████████████████████| 71kB 3.6MB/s \n",
+ "\u001b[?25h Building wheel for ml-things (setup.py) ... \u001b[?25l\u001b[?25hdone\n",
+ " Building wheel for ftfy (setup.py) ... \u001b[?25l\u001b[?25hdone\n"
],
"name": "stdout"
}
@@ -671,28 +2163,44 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "X5IO8-xrXvWY",
- "colab_type": "text"
+ "id": "X5IO8-xrXvWY"
},
"source": [
- "# Imports\n",
+ "## **Imports**\n",
"\n",
- "Import python all needed python packages. "
+ "Import all needed libraries for this notebook.\n",
+ "\n",
+ "Declare basic parameters used for this notebook:\n",
+ "\n",
+ "* `device` - Device to use by torch: GPU/CPU. I use CPU as default since I will not perform any costly operaitons.\n",
+ "\n",
+ "* `train_batch_size` - Batch size used on train data.\n",
+ "\n",
+ "* `valid_batch_size` - Batch size used for validation data. It usually is greater than `train_batch_size` since the model would only need to make prediciton and no gradient calculations is needed."
]
},
{
"cell_type": "code",
"metadata": {
- "id": "J42h802BXwTe",
- "colab_type": "code",
- "colab": {}
+ "id": "J42h802BXwTe"
},
"source": [
"import io\n",
"import os\n",
+ "import torchtext\n",
"from tqdm.notebook import tqdm\n",
- "from ftfy import fix_text\n",
- "from torchtext import data"
+ "from ml_things import fix_text\n",
+ "from torch.utils.data import Dataset, DataLoader\n",
+ "\n",
+ "# Will use `cpu` for simplicity.\n",
+ "device = 'cpu'\n",
+ "\n",
+ "# Number of batches for training\n",
+ "train_batch_size = 10\n",
+ "\n",
+ "# Number of batches for validation. Use a larger value than training.\n",
+ "# It helps speed up the validaiton process.\n",
+ "valid_batch_size = 20"
],
"execution_count": 4,
"outputs": []
@@ -700,115 +2208,716 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "shz7rmQO5Uyg",
- "colab_type": "text"
+ "id": "9biWe-OhS4Wo"
},
"source": [
- "# Helper Functions\n",
+ "## Using PyTorch Dataset\n",
"\n",
- "I've created the **file_tsv** function in order to concatenate all text files into a single `.tsv` file.\n",
+ "This is where I create the PyTorch Dataset objects for training and validation that **can** be used to feed data into a model. This is standard procedure when using PyTorch.\n",
+ "\n"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "ChQWVc4IUUPb"
+ },
+ "source": [
+ "### Dataset Class\n",
"\n",
- "Since I'll use the **TabularDataset** from `pytorch.data` I need to pass tabular format file. \n",
+ "Implementation of the PyTorch Dataset class.\n",
"\n",
- "For text data I find the Tab Separated Values format easier to deal with - don't need to import pandas for this."
+ "Most important components in a PyTorch Dataset class are:\n",
+ "* `__len__(self, )` where it returns the number of examples in our dataset that we read in `__init__(self, )`. This will ensure that `len()` will return the number of examples.\n",
+ "* `__getitem__(self, item)` where given an index `item` will return the example coresponding to the `item` position."
]
},
{
"cell_type": "code",
"metadata": {
- "id": "neRkJu_n5Wcp",
- "colab_type": "code",
- "colab": {}
+ "id": "-Fo_0qq_S_Zf"
},
"source": [
- "def file_tsv(partition_path, save_path='./'):\n",
- " \"\"\"Parse each file in partition and keep track of sentiments.\n",
- " Create a list of paris [tag, text]\n",
+ "class MovieReviewsTextDataset(Dataset):\n",
+ " r\"\"\"PyTorch Dataset class for loading data.\n",
+ "\n",
+ " This is where the data parsing happens.\n",
+ "\n",
+ " This class is built with reusability in mind.\n",
+ "\n",
+ " Arguments:\n",
+ "\n",
+ " path (:obj:`str`):\n",
+ " Path to the data partition.\n",
+ "\n",
" \"\"\"\n",
"\n",
- " # list of all examples in format [tag, text]\n",
- " examples = []\n",
- " print(partition_path)\n",
- " # for each sentiment\n",
- " for sentiment in tqdm(['pos', 'neg']):\n",
- " # find path for sentiment\n",
- " sentiment_path = os.path.join(partition_path, sentiment)\n",
- " # get all files from path sentiment\n",
- " files_names = os.listdir(sentiment_path)\n",
- " # for each file in path sentiment\n",
- " for file_name in files_names:\n",
- " # get file content\n",
- " file_content = io.open(os.path.join(sentiment_path, file_name), mode='r', encoding='utf-8').read()\n",
- " # fix any format errors\n",
- " file_content = fix_text(file_content)\n",
- " # append sentiment and file content\n",
- " examples.append([sentiment, file_content])\n",
- " # create a TSV file with same format `sentiment text`\n",
- " examples = [\"%s\\t%s\"%(example[0], example[1]) for example in examples]\n",
- " # create file name\n",
- " tsv_filename = os.path.basename(partition_path) + '_pos_neg_%d.tsv'%len(examples)\n",
- " # write to file\n",
- " io.open(os.path.join(save_path, tsv_filename), mode='w', encoding='utf-8').write('\\n'.join(examples))\n",
+ " def __init__(self, path):\n",
"\n",
- " return tsv_filename"
+ " # Check if path exists.\n",
+ " if not os.path.isdir(path):\n",
+ " # Raise error if path is invalid.\n",
+ " raise ValueError('Invalid `path` variable! Needs to be a directory')\n",
+ " \n",
+ " self.texts = []\n",
+ " self.labels = []\n",
+ " # Since the labels are defined by folders with data we loop \n",
+ " # through each label.\n",
+ " for label in ['pos', 'neg']:\n",
+ " sentiment_path = os.path.join(path, label)\n",
+ "\n",
+ " # Get all files from path.\n",
+ " files_names = os.listdir(sentiment_path)#[:10] # Sample for debugging.\n",
+ " # Go through each file and read its content.\n",
+ " for file_name in tqdm(files_names, desc=f'{label} Files'):\n",
+ " file_path = os.path.join(sentiment_path, file_name)\n",
+ "\n",
+ " # Read content.\n",
+ " content = io.open(file_path, mode='r', encoding='utf-8').read()\n",
+ " # Fix any unicode issues.\n",
+ " content = fix_text(content)\n",
+ " # Save content.\n",
+ " self.texts.append(content)\n",
+ " # Save labels.\n",
+ " self.labels.append(label)\n",
+ "\n",
+ " # Number of exmaples.\n",
+ " self.n_examples = len(self.labels)\n",
+ "\n",
+ " return\n",
+ "\n",
+ "\n",
+ " def __len__(self):\n",
+ " r\"\"\"When used `len` return the number of examples.\n",
+ "\n",
+ " \"\"\"\n",
+ " \n",
+ " return self.n_examples\n",
+ "\n",
+ "\n",
+ " def __getitem__(self, item):\n",
+ " r\"\"\"Given an index return an example from the position.\n",
+ " \n",
+ " Arguments:\n",
+ "\n",
+ " item (:obj:`int`):\n",
+ " Index position to pick an example to return.\n",
+ "\n",
+ " Returns:\n",
+ " :obj:`Dict[str, str]`: Dictionary of inputs that are used to feed \n",
+ " to a model.\n",
+ "\n",
+ " \"\"\"\n",
+ "\n",
+ " return {'text':self.texts[item], 'label':self.labels[item]}"
],
- "execution_count": 5,
+ "execution_count": 20,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
- "id": "SGNgKXkq5X8f",
- "colab_type": "text"
+ "id": "52b7iTASUYAZ"
},
"source": [
- "# Convert data to `.tsv`\n",
+ "### Train - Validation Datasets\n",
"\n",
- "I will call the **file_tsv** function for each of the two partitions **train** and **test**. \n",
+ "Create PyTorch Dataset for train and validation partitions."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "EQ-HrjdOUcK6",
+ "outputId": "de2d9fc2-96ba-4e5c-a571-b691889ed2c0",
+ "colab": {
+ "base_uri": "https://localhost:8080/",
+ "height": 298,
+ "referenced_widgets": [
+ "dad0fde1bb234e199ecb90fa8d7121f7",
+ "b744182028cb49b8b9487b223154c17f",
+ "4b37a43a1a2a484e81bd846528d0e869",
+ "56c403126c184ab285e8f8c32913adb3",
+ "463174bcce1d426f8c782a73be06542f",
+ "2cf03d65545d4ff38ce1529d8830f6e1",
+ "e66c98f8caf94f23828823a8a8e1a3d9",
+ "21eab334c3d6493b8529d8a748f07985",
+ "30febe7dc42c4f6b83739a431e30c270",
+ "cd437d138e494933ba6d10a30e0ed67a",
+ "bdbd4b6d1fb44512b65122ac4796158b",
+ "e5023ddea8cc4c98ba36b2e699679e06",
+ "66c74197bf53429099167c5805bd8dfb",
+ "e7b2214d33fd48989131f7050cb8f34f",
+ "1c59c6b29340469587c2e910dcaf50c6",
+ "e37cf4e14300482581f9d38947933821",
+ "1bf8f9f8bce640c1bebb18287571513f",
+ "6e4c7b7b5a85429293ec6fcab2d7c9a8",
+ "1fed350aa59b4c7ea25df0f3e82a3067",
+ "ffde509700004f658d5d86fdc861c05e",
+ "93db5c1cd89f44ea9374efc63c069a9c",
+ "9b684c37c06a47a1860a1ada542f7a66",
+ "7dcb325407834d97985ad48033279a61",
+ "825dc452fcbd492d83520b74f43a637a",
+ "5dedadf8a8164dc891262a15d1c449ae",
+ "5255f81b15d64273a2516cef83f09a1f",
+ "3d1f93b24f5a4d25846a624981b388f9",
+ "580be97d9d4b40859ad2293b10d56998",
+ "45be9b36c2474d45a3d238a8f831b05f",
+ "ff4c12f0396e489fa1803482ba46a95f",
+ "cf1451d6f5c14335baf75f351952ef05",
+ "851504272d754ca19dd932fce175cc1c"
+ ]
+ }
+ },
+ "source": [
+ "print('Dealing with Train...')\n",
+ "# Create pytorch dataset.\n",
+ "train_dataset = MovieReviewsTextDataset(path='/content/aclImdb/train')\n",
"\n",
- "The function will return the path where the `.tsv` file is saved so we can use it later in pytorchtext."
+ "print(f'Created `train_dataset` with {len(train_dataset)} examples!')\n",
+ "\n",
+ "print()\n",
+ "\n",
+ "print('Dealing with Validation...')\n",
+ "# Create pytorch dataset.\n",
+ "valid_dataset = MovieReviewsTextDataset(path='/content/aclImdb/test')\n",
+ " \n",
+ "print(f'Created `valid_dataset` with {len(valid_dataset)} examples!')"
+ ],
+ "execution_count": 22,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Dealing with Train...\n"
+ ],
+ "name": "stdout"
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "dad0fde1bb234e199ecb90fa8d7121f7",
+ "version_minor": 0,
+ "version_major": 2
+ },
+ "text/plain": [
+ "HBox(children=(FloatProgress(value=0.0, description='pos Files', max=12500.0, style=ProgressStyle(description_…"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ }
+ },
+ {
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ],
+ "name": "stdout"
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "30febe7dc42c4f6b83739a431e30c270",
+ "version_minor": 0,
+ "version_major": 2
+ },
+ "text/plain": [
+ "HBox(children=(FloatProgress(value=0.0, description='neg Files', max=12500.0, style=ProgressStyle(description_…"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ }
+ },
+ {
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "Created `train_dataset` with 25000 examples!\n",
+ "\n",
+ "Dealing with Validation...\n"
+ ],
+ "name": "stdout"
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "1bf8f9f8bce640c1bebb18287571513f",
+ "version_minor": 0,
+ "version_major": 2
+ },
+ "text/plain": [
+ "HBox(children=(FloatProgress(value=0.0, description='pos Files', max=12500.0, style=ProgressStyle(description_…"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ }
+ },
+ {
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ],
+ "name": "stdout"
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "5dedadf8a8164dc891262a15d1c449ae",
+ "version_minor": 0,
+ "version_major": 2
+ },
+ "text/plain": [
+ "HBox(children=(FloatProgress(value=0.0, description='neg Files', max=12500.0, style=ProgressStyle(description_…"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ }
+ },
+ {
+ "output_type": "stream",
+ "text": [
+ "\n",
+ "Created `valid_dataset` with 25000 examples!\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "OuXCNfZLUeJJ"
+ },
+ "source": [
+ "### PyTorch DataLoader\n",
+ "\n",
+ "In order to group examples from the PyTorch Dataset into batches we use PyTorch DataLoader. This is standard when using PyTorch."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "vW8mE9wYTfLJ",
+ "outputId": "e1f977af-72a7-4066-cd25-0d117ed6ba11",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ }
+ },
+ "source": [
+ "# Move pytorch dataset into dataloader.\n",
+ "torch_train_dataloader = DataLoader(train_dataset, batch_size=train_batch_size, shuffle=True)\n",
+ "print(f'Created `torch_train_dataloader` with {len(torch_train_dataloader)} batches!')\n",
+ "\n",
+ "# Move pytorch dataset into dataloader.\n",
+ "torch_valid_dataloader = DataLoader(valid_dataset, batch_size=valid_batch_size, shuffle=False)\n",
+ "print(f'Created `torch_valid_dataloader` with {len(torch_valid_dataloader)} batches!')"
+ ],
+ "execution_count": 23,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Created `torch_train_dataloader` with 2500 batches!\n",
+ "Created `torch_valid_dataloader` with 1250 batches!\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "6IAuKwSvIbxe"
+ },
+ "source": [
+ "### PyTorchText Bucket Iterator Dataloader\n",
+ "\n",
+ "Here is where the magic happens! We pass in the **train_dataset** and **train_dataset** PyTorch Dataset splits into **BucketIterator** to create the actual batches.\n",
+ "\n",
+ "It's very nice that PyTorchText can handle splits! We need to write same line of code again for train and validation split.\n",
+ "\n",
+ "*The **sort_key** parameter is very important!* It is used to order text sequences in batches. Since we want to batch sequence of text with similar length, we will use a simple function that returns the length of an data example (`len(x['text')`). This function needs to follow the format of the PyTorch Dataset in order to return the length of an example, in my case I return a dicitonary with `text` key for an example.\n",
+ "\n",
+ "**It is important to keep `sort=False` and `sort_with_batch=True` to only sort the examples in each batche and not the examples in the whole dataset!**\n",
+ "\n",
+ "Find more details in the pytorchtext **BucketIterator** documentation [here](https://torchtext.readthedocs.io/en/latest/data.html#bucketiterator) - look at the **BPTTIterator** because it has same parameters except the **bptt_len** argument.\n",
+ "\n",
+ "**Note:**\n",
+ "*If you want just a single DataLoader use `torchtext.data.BucketIterator` instead of `torchtext.data.BucketIterator.splits` and make sure to provide just one PyTorch Dataset instead of tuple of PyTorch Datasets and change the parameter `batch_sizes` and its tuple values to `batch_size` with single value: `dataloader = torchtext.data.BucketIterator(dataset, batch_size=batch_size, )`*"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "OKfYOod_LQrs",
+ "outputId": "2e38499b-6f9b-4941-b27f-77ef8e039c43",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ }
+ },
+ "source": [
+ "# Group similar length text sequences together in batches.\n",
+ "torchtext_train_dataloader, torchtext_valid_dataloader = torchtext.data.BucketIterator.splits(\n",
+ " \n",
+ " # Datasets for iterator to draw data from\n",
+ " (train_dataset, valid_dataset),\n",
+ "\n",
+ " # Tuple of train and validation batch sizes.\n",
+ " batch_sizes=(train_batch_size, valid_batch_size),\n",
+ "\n",
+ " # Device to load batches on.\n",
+ " device=device, \n",
+ "\n",
+ " # Function to use for sorting examples.\n",
+ " sort_key=lambda x: len(x['text']),\n",
+ "\n",
+ "\n",
+ " # Repeat the iterator for multiple epochs.\n",
+ " repeat=True, \n",
+ "\n",
+ " # Sort all examples in data using `sort_key`.\n",
+ " sort=False, \n",
+ "\n",
+ " # Shuffle data on each epoch run.\n",
+ " shuffle=True,\n",
+ "\n",
+ " # Use `sort_key` to sort examples in each batch.\n",
+ " sort_within_batch=True,\n",
+ " )\n",
+ "\n",
+ "# Print number of batches in each split.\n",
+ "print('Created `torchtext_train_dataloader` with %d batches!'%len(torchtext_train_dataloader))\n",
+ "print('Created `torchtext_valid_dataloader` with %d batches!'%len(torchtext_valid_dataloader))"
+ ],
+ "execution_count": 67,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Created `torchtext_train_dataloader` with 2500 batches!\n",
+ "Created `torchtext_valid_dataloader` with 1250 batches!\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "gHQaFJvYNkKJ"
+ },
+ "source": [
+ "### Compare DataLoaders\n",
+ "\n",
+ "Let's compare the PyTorch DataLoader batches with the PyTorchText BucketIterator batches. We can see how nicely examples of similar length are grouped in same batch with PyTorchText.\n",
+ "\n",
+ "**Note:** *When using the PyTorchText BucketIterator, make sure to call `create_batches()` before looping through each batch! Else you won't get any output form the iterator.*"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "hevDHd2uNn3K",
+ "outputId": "64326c12-19d9-48f5-f2cc-e206472faddd",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ }
+ },
+ "source": [
+ "# Loop through regular dataloader.\n",
+ "print('PyTorch DataLoader\\n')\n",
+ "for batch in torch_train_dataloader:\n",
+ " \n",
+ " # Let's check batch size.\n",
+ " print('Batch size: %d\\n'% len(batch['text']))\n",
+ " print('LABEL\\tLENGTH\\tTEXT'.ljust(10))\n",
+ "\n",
+ " # Print each example.\n",
+ " for text, label in zip(batch['text'], batch['label']):\n",
+ " print('%s\\t%d\\t%s'.ljust(10) % (label, len(text), text))\n",
+ " print('\\n')\n",
+ " \n",
+ " # Only look at first batch. Reuse this code in training models.\n",
+ " break\n",
+ " \n",
+ "\n",
+ "# Create batches - needs to be called before each loop.\n",
+ "torchtext_train_dataloader.create_batches()\n",
+ "\n",
+ "# Loop through BucketIterator.\n",
+ "print('PyTorchText BuketIterator\\n')\n",
+ "for batch in torchtext_train_dataloader.batches:\n",
+ "\n",
+ " # Let's check batch size.\n",
+ " print('Batch size: %d\\n'% len(batch))\n",
+ " print('LABEL\\tLENGTH\\tTEXT'.ljust(10))\n",
+ " \n",
+ " # Print each example.\n",
+ " for example in batch:\n",
+ " print('%s\\t%d\\t%s'.ljust(10) % (example['label'], len(example['text']), example['text']))\n",
+ " print('\\n')\n",
+ " \n",
+ " # Only look at first batch. Reuse this code in training models.\n",
+ " break"
+ ],
+ "execution_count": 53,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "PyTorch DataLoader\n",
+ "\n",
+ "Batch size: 10\n",
+ "\n",
+ "LABEL\tLENGTH\tTEXT\n",
+ "neg\t2823\tThis show is a perfect example of how the CBC should stick to either news, sports, or satirical sketch comedy. As a developer of situation comedy, CBC has shown it can combine the pizazz of \"King of Kensington\" with the belly laughs of \"The Beachcombers\". It is an embarrassment to great shows like \"Kids in the Hall\" and \"Second City\" that they have to share their comedic roots with this lame production.
I have to admit, that I didn't give this show much of a chance right from when I first heard of its concept. To start, half of the concept is a direct attempt to rip-off one of the few sitcom successes in English-Canadian history, \"Corner Gas\". The rest of the concept--the cultural clash--is far from being original and is too often used as a crutch for screen writing laziness. The selection of the Muslim religion as the basis for the \"fish out of water\" characters seems to be a desperate attempt to be \"edgy\" and \"topical\", but comes off as forced. Some of the jokes that are based around the local's reaction to the newcomers are cringe inducing and thoroughly insulting to the intelligence of everyone involved, especially the audience.
This show is a perfect example of how CBC just doesn't \"get it\" when it comes to creating Canadian content, especially when presenting Canada as a multicultural environment. Cultural diversity in Canada does not have to be presented in such a heavy-handed and forced way. It would be a refreshing change to see CBC introduce diversity into a television show without making the show all about said diversity. I doubt that CBC has sufficient sitcom talent to pull off something so subtle. A comparison could be made to the way diversity is depicted in Corner Gas--i.e. the aboriginal characters are not set apart by their ethnicity nor is their heritage used to generate story lines. More realistically, their lives and the other characters lives intertwine in a way that makes ethnicity no more significant than any of their other personal characteristics.
That being said, even as a formulaic fish-out-of-water comedy this show fails. The acting is weak, the comic pacing all over the map, and the story premises that I saw were too far beyond the suspension of belief, even for a comedy. The only saving grace is the talented Derek McGrath, who is horribly wasted here. I doubt that even the addition of guest stars (Colin Mochrie, for example, as an Anglican archbishop) can save this dog. I decided to give the show a chance once the CBC's 'hype' had died down; but two episodes were all I could stand--I could almost feel my braincells shutting themselves down with each failed punchline. The time-slot would be better served by airing more Coronation Street, Air Farce re-runs, or Dr. Who. Even an infomercial would be an improvement. \n",
+ "neg\t1118\tLike last year, I didn't manage to sit through the whole thing. Okay, so Chris Rock as a host was a good choice because he was vaguely engaging. Or rather, out of all the total bores packed into the theatre, he at least wasn't in the Top 10 Most Boring. A lot of the presenters, on the other hand, were in this coveted Top 10. I hadn't known that the whole thing had been done by autocue (although I knew it was scripted) but it was really terrible to see these supposedly good actors unable to insert expression, look away from the cue and stumble over simple words (Natalie Portman…if there's no director, she's gone). The Night of Fancy Dresses and Boring Speeches was long and tedious, Beyonce Knowles butchered some good songs and there were very few decent acceptance speeches and clips. Adam Sandler wins the Worst Presenter award.
For helping me write this review I'd like to thank my Mum, my Dad, my lawyers and my pedicurist for all believing in me, and I'd like to point out that I have a high metabolism and of course I haven't been starving myself for a month. I'm not going to cry...thank you. \n",
+ "pos\t877\tThis is a great movie. In the same genre of the \"Memphis Belle\". Seen it about 10 years ago. And would like to see it again. There is a link with the history of the hells angels!! How the pilot crew fight the Germans in WO2. And most Changes form pilots to Harley motor cycle rs. The movie is in a way really happened. See the movie! And reed the history of the hells angels at hells at hells angels.com Regards Frederik.
Cast & Crew: John Stamos, John Stockwell, Teri Polo, Kris Kamm, directed by Graham Baker more » Synopsis: The story of a rowdy backwoods rebel biker who joins the Army to avoid a stiff prison sentence after a minor brush with the law. Though he chafes at Army discipline, he soon proves himself under fire as a daring and charismatic leader of men in a Motorcycle Scout Troop in pr-World War II Spain. more » MPAA Rating: PG Runtime: 88 minutes \n",
+ "neg\t687\tThe only thing that surprises me more than the number of people who liked this movie is that it was directed by Clint Eastwood, whose work I admire immensely. The leads had absolutely no chemistry. Not for a second could I believe that there was anything deeper than lust between them. The story just didn't ring true. Add to that stilted conversation, tons of stereotypes, and an incredibly slow plot that basically leads nowhere, and you've got yourself a real stinker. Kay Lenz's nude scenes might be worthwhile for those seeking some salacious fare, but otherwise this is a colossal waste of time. My thoughts as I watched the movie was that itwould have been better titled \"Cheesy.\" \n",
+ "pos\t581\tThis movie kept me constantly entertained. In comparing this to Serial Mom, Mr. Waters has gone back to his grittier side. This is not nearly as polished.
There is a dark side here. A message about how success and fame changes a person -- but more importantly how it changes the people around you.
There is not a false moment in this film.
The characters are somewhat cartoonish... but I want to believe that is what Mr. Waters is trying to achieve.
It is fascinating to watch how Mr. Waters has evolved... This is truly his finest work. \n",
+ "pos\t2420\tLife Begins - and ends - in a typical 1930's maternity / recovery ward, where we view 48 hours in the lives of several high risk pregnant women, played by Loretta Young, Glenda Farrell, Clara Blandick (Aunty Em???), Vivienne Osborne, Dorothy Tree, and Gloria Shea, as they await to give birth. While the film features plot devices which seem far fetched today when maternity wards are much more controlled and restricted, it does offer us a look back in time to see what giving birth in a typical city hospital in 1932 was like for our grandmothers and great-grandmothers. I found the film fascinating and exceptionally moving.
Oddly enough, the most outstanding performance in this film comes from a male cast member, young Eric Linden as Jed Sutton, Grace's (Loretta Young) husband. What an actor! As a first time father, Jed is distraught and uneasy with hospital staff who seem to brush off his concerns about his wife as they might brush crumbs off a cafeteria table. I felt his every concern keenly. I'd like to see more of this actor's work. He had a very emotional voice, which was used to unforgettable effect in Gone With The Wind. In that film Eric played the young soldier whose leg was amputated without anesthesia, who screamed \"Don't cut! Don't cut!\" as Scarlett fled the hospital in horror. Chilling! Another great performance is from Aline MacMahon, who plays Miss Bowers, the nurse. Her character is a salt of the earth type, the kind of nurse we all hope to get for our hospital stays, who breaks the hospital rules constantly in order to show a more humane side of the medical profession.
Loretta Young did another superb acting job here as well, a very authentic and deeply felt performance as Grace. My, she is great in these precodes, I've really grown to appreciate her more as an actress the last few months.
Glenda Farrell played her role of a shrill unwed mother a little over the top for my taste (didn't anyone know back in 1932 that swigging brandy from a hot water bottle might be hazardous to unborn babies' health?) but her character redeems herself in the end.
Also in the cast was an uncredited Gilbert Roland, silent movie star, as a grieving Italian husband. His screen time was brief, but notable.
Life Begins is a must-see precode, try to catch it sometime on TCM, but remember to bring a few hankies to cry into. 9 out of 10. \n",
+ "pos\t692\tI first saw this film 40 years ago on N.Y. television, and thought it was a depressing look at the future. Wells sees restriction of private freedoms as a good thing. (\" no private airplanes\". The 30 year plus war in the film was the reason this film was not shown to British film goers doing the war. The concept of the future, and the Korda an Co. concept of the the machines of the future are the real stars of the film. The very best acting performance is that of Ralph Richardson as the Boss. A combination of Winston Churchill and Edina from Absolutely Fabulous comedy series. It is interesting to note that the Boss's negative personality is somewhat similar to the war time Churchill. \n",
+ "pos\t2373\tBuffs of the adult western that flourished in the 1950s try and trace its origins to the film that kicked off the syndrome. Of course, we can go back to Howard Hawks's Red River (1948) or further still to John Ford's My Darling Clementine (1946), but if we want to stick with this single decade, then it has to be one of a couple of films made in that era's initial year. One is \"The Gunfighter,\" an exquisitely grim tale of a famed gunslinger (Ringo) facing his last shootout. Another from that same year is \"Winchester '73,\" and it's worth noting that Millard Mitchell appears in both as grim, mustached, highly realistic range riders. In The Gunfighter, he's the town marshal expected to arrest Ringo but once rode with him in an outlaw gang. In Winchester, he's the sidekick to Jimmy Stewart, a kind of Horatio to Stewart's Hamlet in this epic/tragic tale. The plot is simple enough: Stewart's lonesome cowpoke wins a remarkable Winchester in a shooting match, beating the meanest man in the west (Stephen McNally), who is actually his own brother and caused the death of their father. When the brother steals the gun, Stewart and Mitchell go after him in a cowboy odyssey that takes them all across the frontier, meeting up with both outlaws and Indians. (In one wonderful bit, two future stars - Rock Hudson and Tony Curtis - play an Indian chief and a U.S. cavalry soldier - during a well staged pitched-battle. Dan Duryea steals the whole show as a giggling outlaw leader, while Shelly Winters, just before she began to gain weight, is fine as the shady lady who ties all the plots together. Today, filmmakers would go on for about four hours to bring such an ambitious idea to the screen, but Anthony Mann does so in an extremely economical amount of time, with not a minute wasted. Such western legends as Bat Masterson and Wyatt Earp (terrifically played by Will Geer) make brief appearances, adding to the historicity as well as the epic nature. The final battle between good and bad brothers, high atop a series of jutting rock canyons, is now legendary among western buffs. It's also worth noting that Stewart, however much associated he became with western films, does what is actually his first western leading man role here - yes, he was in Destry Rides Again eleven years earlier, but was cast in that comedy spoof because he seemed so WRONG for westerns! \n",
+ "neg\t979\tLet's see. In the \"St. Elsewhere\" finale we found out that there was no hospital and that every thing had been in the mind of an autistic child. \"Newhart\" ended by telling us that it had all been a dream. And \"Roseanne\" ended by telling us that it all had taken place in her mind. Very \"creative\". Annoying was more like it. Yes, it was just a TV show and wasn't at all reality. It's just that when you get caught up in a great movie or TV show you end up at least wanting to believe that it's all \"real\". At least as far as the reality it portrays on screen. This type of series finale had been done twice before and was old hat, frustrating and simply not fun to watch. Now \"Newhart\" being all a dream? At least done in a creative way that far exceeded the expectations of anyone who loved the show. The idea itself was not too engaging but it was so brilliantly done that its arguably the Best Series Finale Ever. Roseanne left me feeling cheated after being such a loyal fan. \n",
+ "neg\t492\tI chose \"Dead Creatures\" because I thought it was a zombies movie just like \"28 days\" or so... but not at all. It isn't even a horror movie. Nothing happens, except for a group of women that seem to have been infected by a strange virus that make her to eat human flesh in order to survive.
That plot gives rise to a series of disgusting scenes of cannibalism...
Very VERY BAD MOVIE.
*My rate: 2/10
------------------
------------------ \n",
+ "\n",
+ "\n",
+ "PyTorchText BuketIterator\n",
+ "\n",
+ "Batch size: 10\n",
+ "\n",
+ "LABEL\tLENGTH\tTEXT\n",
+ "neg\t2045\tI'm kinda torn on DARK ANGEL. The film appears to be a \"loving\" tribute to the greatest pin-up to ever live - but there is so little actual \"content\" that the film itself is virtually pointless. I can't really see what the motivation or \"point\" of this film is - as there is very little biographical information provided in the narrative - so those who don't know much about Bettie aren't gonna know much more after watching DARK ANGEL either...
The film basically chronicles the last few years of Bettie's career in bondage modeling. Almost the entire film is comprised of \"re-enactments\" of some of Bettie's more \"famous\" photo-shoots and loops. These re-enactments take up literally 75% of the films run-time, and give virtually no insight into Bettie as a person. The film touches briefly on her short-lived legitimate acting pursuits, and her subsequent decision to leave the \"business\" and become religious - but all of this is pretty much glossed-over in favor of showing long and drawn-out re-enactment scenes...
DARK ANGEL isn't a horrible film - there's just no substance to it. The other problem is that the actress that plays Bettie only really resembles her in farther away shots - up-close it's a no-go. The other thing that irritated me, is that although Bettie did several topless modeling shoots - the only nudity in the film was a short segment shot in a zoo during the end credits. The film itself is obviously extremely low-budget, but does what it can set and costume-wise within it's limitations - so no gripes from me there. The acting is pretty wooden and unmemorable from everyone involved. In fact - the most memorable thing about the whole film for me, was noticing during the end credits that the actor who played Irving Klaw's real name is Dukey Flyswatter. No joke - check the cast list. Can't say that I recommend this one too highly unless you are a true Bettiefile completist and must own anything relating to her. And if you are that bad off - then you need to seek treatment anyway...4/10 \n",
+ "pos\t2056\tJust saying you've got a movie about John Holmes is a guarantee to get some folks in front of the screen, but writer/director James Cox delivers oh so much more. A \"Rashamon\" of the sleazy Hollywood set, the film splitters the July 1981 Wonderland murders through a variety of angles (and film stocks), but mostly through the filter of John Holmes' coked out weasel brain. In a film full of bad guys Holmes is either the most vile, the most pathetic or both. Several versions of the story emerge and merge as Cox flashes jump cuts and twisting title cards amid effects and emoting. The dialogue is fast and naturalistic and never once rings false. While the film takes places two years after Holmes had fallen out of porn and into a truly wicked drug fueled depravity, Kilmer relentlessly exudes a sexuality so intense it can be measured in inches. This sexuality at its edges creates a sense of foreboding that hangs over the entire film almost as heavily as the violence at its center. Those murders are teased at through the whole film though are never clearly shown, not even at the climax,though the violence of them relentlessly infuses the whole picture and much blood is splattered across walls and crime scene photos. Once again Val Kilmer as Holmes shows he can act wacko better than anyone else working. Strutting, cringing, bragging or begging, Kilmer is constantly in character and the character is constantly a fascinating car wreck. Stand out performances beside Kilmer definitely include Ted Levine as the lead cop in the investigation and Lisa Kudrow as Holmes estranged wife. The trio of criminals Holmes falls in with include the frighteningly high energy Josh Lucas, the ever interesting Timothy Blake Nelson and an absolutely unrecognizable Dylan McDermott in a pivotal role as the teller yet another version of the murders. Cox suggests that no matter how much we learn about Wonderland, there is always a worse version possible, but looking through the debauchery surrounding it is much more fascinating than understanding the truth. \n",
+ "pos\t2065\t\"Cinderella\" is one of the most beloved of all Disney classics. And it really deserves its status. Based on the classic fairy-tale as told by Charles Perrault, the film follows the trials and tribulations of Cinderella, a good girl who is mistreated by her evil stepmother and equally unlikable stepsisters. When a royal ball is held and all eligible young women are invited (read: the King wants to get the Prince to marry), Cinderella is left at home whilst her stepmother takes her awful daughters with her. But there is a Fairy Godmother on hand...
The story of \"Cinderella\" on its own wouldn't be able to pad out a feature, so whilst generally staying true to the story otherwise, the fairly incidental characters of the animals whom the Fairy Godmother uses to help get the title character to the ball become Cinderella's true sidekicks. The mice Jaq and Gus are the main sidekicks, and their own nemesis being the stepmother's cat Lucifer. Their antics intertwine generally with the main fairy-tale plot, and are for the most part wonderful. Admittedly, the film does slow down a bit between the main introduction of the characters and shortly before the stepsisters depart for the ball, but after this slowdown, the film really gets going again and surprisingly (since \"Cinderella\" is the most worn down story of all time, probably) ends up as one of the most involving Disney stories.
The animation and art direction is lovely. All of the legendary Nine Old Men animated on this picture, and Mary Blair's colour styling and concept art (she also did concept art and colour styling for \"Alice in Wonderland\", \"Peter Pan\", \"The Three Caballeros\" and many many others) manage to wiggle their way on screen. The colours and designs are lovely, especially in the Fairy Godmother and ball scenes, as well as in those pretty little moments here and there.
Overall, \"Cinderella\" ranks as one of the best Disney fairy-tales and comes recommended to young and all that embodies the Disney philosophy that dreams really can come true. \n",
+ "pos\t2081\tI could not agree more with the quote \"this is one of the best films ever made.\" If you think Vanilla Sky is simply a \"re-make,\" you could not be more wrong. There is tremendous depth in this film: visually, musically, and emotionally.
Visually, because the film is soft and delicate at times (early scenes with Sofia) and at other times powerful and intense (Times Square, post-climactic scenes).
The music and sounds tie into this movie so perfectly. Without the music, the story is only half told. Nancy Wilson created an emotional, yet eclectic, score for the film which could not be more suitable for such a dream-like theme (although never released, I was able to get my hands on the original score for about $60. If you look hard, you may be able to find a copy yourself). Crowe's other musical selections, such as The Beach Boys, Josh Rouse, Spiritualized, Sigur Ros, the Monkees, etcetera etcetera, are also perfect fits for the film (Crowe has an ear for great music).
More importantly, the emotional themes in this film (i.e. love, sadness, regret) are very powerful, and are amplified tenfold by the visual and musical experience, as well as the ingenious dialogue; I admit, the elevator scene brings tears to my eyes time and time again.
The best part of this film however (as if it could get any better) is that it is so intelligently crafted such that each time you see the film, you will catch something new--so watch closely, and be prepared to think! Sure, a theme becomes obvious after the first or second watch, but there is always more to the story than you think.
This is easily Cameron Crowe's best work, and altogether a work of brilliance. Much of my film-making and musical inspiration comes from this work alone. It has honestly touched my life, as true art has a tendency of doing. It continually surprises me that there are many people that cannot appreciate this film for what it is (I guess to understand true art is an art itself).
Bottom line: Vanilla Sky is in a league of its own. \n",
+ "neg\t2094\tI'm gettin' sick of movies that sound entertaining in a one-line synopsis then end up being equal to what you'd find in the bottom center of a compost heap.
Who knows: \"Witchery\" may have sounded interesting in a pitch to the studios, even with a \"big name cast\" (like Blair and Hasselhoff - wink-wink, nudge-nudge) and the effervescent likes of Hildegard Knef (I dunno, some woman...).
But on film, it just falls apart faster than a papier-mache sculpture in a rainstorm. Seems these unfortunate folks are trapped in an island mansion off the Eastern seaboard, and one of them (a woman, I'd guess) is being targeted by a satanic cult to bear the child of hell while the others are offed in grotesque, tortuous ways.
Okay, right there you have a cross-section of plots from \"The Exorcist\", \"The Omen\", \"Ten Little Indians\" and a few other lesser movies in the satanic-worshippers-run-amok line. None of it is very entertaining and for the most part, you'll cringe your way from scene to scene until it's over.
No, not even Linda Blair and David Hasselhoff help matters much. They're just in it to pick up a paycheck and don't seem very intent on giving it their \"all\".
From the looks of it, Hasselhoff probably wishes he were back on the beack with Pam Anderson (and who can blame him?) and Linda... well, who knows; a celebrity PETA benefit or pro-am golf tour or whatever it is she's in to nowadays.
And the torture scenes! Ecchhhh. You'll see people get their mouths sewn shut, dangled up inside roaring fireplaces, strung up in trees during a violent storm, vessels bursting out of their necks, etc, etc. Sheesh, and I thought \"Mark of the Devil\" was the most sadistic movie I'd seen....
Don't bother. It's not worth your time. I can't believe I told you as much as I did. If you do watch it, just see if you can count the cliches. And yes, Blair gets possessed, as if you didn't see THAT coming down Main Street followed by a marching band.
No stars. \"Witchery\" - these witches will give you itches. \n",
+ "pos\t2105\tSPOILERS
In the words of Jean-Paul Sartre, \"Hell is other people\". In \"The Odd Couple\", Jack Lemmon and Walter Matthau demonstrate just how accurate this can be. As Felix Ungar and Oscar Madison, Lemmon and Matthau respectively create two good friends who decide to live together. As the two begin to slowly grow more and more frustrated with each other, the laughs come thick and fast, before Felix departs, leaving Oscar a changed and more cleanly individual.
Jack Lemmon as Ungar is absolutely superb as the neurotic, cleaning obsessed divorcee coping with life as a single man. Walter Matthau in contrast to Lemmon's character is equally as good as the slobbish sports writer who simply wants to play poker to earn money for his child benefits.
Lemmon and Matthau are magnificant in their selected parts, to some degree dependent upon the beautiful script by Neil Simon, and simultaneously because they work well as a team. As two friends who are inherently different in lifestyles, although similar in relationships with ex-wives and children, these two, late, great actors create a partnership which is practically impossible to recreate. So great in fact, that the world screamed out so much for something similar, that two years before Matthau's death and three before Lemmon's, the characters were reunited in an inferior sequel. This idea, whilst following Hollywood's irritating obsession with sequels, might have worked to a certain degree, but at the same time, it could never come close to replicating the genius of this original film.
Ultimately it's not really possible to say anything else. With Simon's amazing script, filled with humour and laughter, the creators of this film were already onto a hit. The casting of Jack Lemmon and Walter Matthau as Felix Ungar and Oscar Madison though, is the most important part of this film. \"The Odd Couple\", with it's traditional soundtrack (which even gained a tribute in \"The Simpsons\"), it's excellent script and it's genius leading men, is a tribute to cinema and a feature for history to remember. \n",
+ "neg\t2110\tFirst off, this is no where near as bad as some of the other trash the Sci-Fi Channel has produced; that isn't to say that Grendel is a good film, in fact, it is very bad, but it definitely had potential to be a lot better. The flaws of the film come from character design, character, absurd additions to the story, the visual effects, the music, and for the most part: the acting.
When speaking of character design, I, of course, mean the way our heroes and villains look. Beowulf and the other Danes seem like ridiculous Vikings, rather than warriors of brute strength -- that helmet our main protagonist wears is just too silly. Grendel looks like the Hulk but with strange tentacle-attachments to his elbows.
The characters are very limited. Beowulf is same from beginning to end, however Finn -- a useless sidekick -- achieved some two-dimensionality, due thanks to his romantic subplot, and Unferth gets some notion as well, as he becomes less conceited.
Much like Finn, there are useless additions to this story to make it its own, while still holding to the source material. The crossbow that is gifted to Beowulf is so ridiculous, I'm surprised the cast didn't walk off the set. Besides additions, there's omissions, such as the underlying themes of Christianity and Paganism, as well as the consequences of lying.
The special effects are mighty terrible. Grendel and his mother Hag are poorly conceived, and as such, they're portrayal on screen is less than believable.
The music is overbearing, especially when a character dies.
All in all, this is not Sci-Fi's worst film to date. No. It is actually one of the better films, though trash it still it is, it is good trash, making it a guilty pleasure at best. The only thing that works is the dialogue, which is still wooden here and there.
I highly recommend you skip this film and watch Robert Zemeckis' take on the ancient story of Beowulf, simply because this film (Grendel) is only half the tale, and not the whole thing, which garners this movie a three-star review. \n",
+ "neg\t2112\tAlthough there are a lot of familiar \"television\" names associated with \"A Man Called Sledge\", there is nothing extraordinary about the film itself or about any of the performances. In fact, the only thing that distinguishes it from a 1960's-70's television series like \"The Rat Patrol\" is a bigger cast and a lot more violence.
James Garner is the biggest star and apparently thought he should try to break away from all the light comedy stuff he had been doing (\"Maverick\", \"Support Your Local Sheriff\"-\"Gunfighter\" etc.). Unfortunately his earthy likability works against him, as Sledge is a humorless character written to cash in on the popularity of Clint Eastwood's spaghetti western character. But Eastwood's stuff was not this flat and uninteresting.
I suppose that \"A Man Called Sledge\" could be classified as a spaghetti western although the pacing is too slow to really fit that sub-genre. Fans of the slow-paced \"Combat\" television series will feel an instant connection as Vic Morrow directed the film and co-wrote the script with Frank Kowalski. Throw in some then trendy slow-mo shots and cross-dissolves, which call attention to themselves rather than serve a story-telling purpose.
The plot is the standard \"big heist\" thing (insert \"The War Wagon\" here) with Sledge plotting how to heist a $300,000 gold shipment. His gang includes Claude Akins and Dennis Weaver. The problem is that while on the move the shipment is guarded by 40 outriders and while stopped it is locked in a vault inside the territorial prison. I think there was an episode of \"Alias Smith and Jones\" with the same plot.
The story would make a decent hour of television but gets old very fast as a very padded feature length film. Garner does not allow any of his charm to leak into his characterization and the film does not generate enough suspense to hold a viewer's interest.
The thing finally crashes and burns shortly after the heist when the gang engages in a contrived and totally illogical card game.
Then again, what do I know? I'm only a child. \n",
+ "pos\t2112\tA dying Kung Fu master sends out his last student in order to track down what happened to the previous five students who were members of the banned Poison Clan.He is to see if they are acting for good and if not he his to stop them The master also wants the student to find another member of the clan who ran off with the clans money which the master wants used for good. The earlier students were all taught in a different style snake, centipede, scorpion, lizard, toad, while the last student was taught a little in each style. All of the students end up in one town looking for the old man with all of the money,and soon everyone is battling to get the money.
Classic martial arts film has title that even many non-fans know. I've spoken with a couple of casual fans and this seems to be the one film that sticks in their head. Its a very good movie, though I'm not really sure why this film stays with people when for my money there are other films that are better from the Shaw studio (One armed Swordmen or the Brave Archer series for example). This isn't to sell the film short, its not, since the film is structured like a mystery, our hero has no clue who anyone is and the Venoms themselves only know at best who two of the others is. We're given the identity of four but we still have to work out who the fifth really is. The film is also odd in that for a martial arts film, other than a training sequence at the start and the killing of the old man and his family for the money, there is really no action for about 40 or 45 minutes. Its a bold move to do it but it pays off since the plot and the performances hold your attention. (The film is also odd in that its the first martial arts film I think I've ever seen where there are no women. I don't think one has a speaking role and I'm pretty sure that none appear in the background. Its indicative of nothing, its just something that struck me.) This is a good solid little film that may not live up to the reputation it has in some circles, but is still a really good film to curl up with on your couch.
Around 7 out of 10. \n",
+ "pos\t2116\tI really, really enjoyed watching this movie! At first, seeing its poster I thought it was just another easy romantic comedy ... but it is simply more than this! I personally believe that this idea (that I'm sure a good part of the viewers had just before they saw the movie) it's yet another important part of the big concept of this movie itself (or even of its marketing strategy)! What I mean is: Nowadays we are slaves to images! To impressions! I went to the cinema to view this film having the wrong impression, the wrong expectations, and at the end I felt how superficial I could be! To exemplify it comes to my mind the sequence near the end in which Sidney buys the plane ticket to go back to New York and as he is asked to 'give an autograph', meaning to sign for the ticket, he believes that just because he got on TV thanks to the scandal at the awards he is now some kind of celebrity. And this is just, I believe, the climax of this main theme around which the movies revolves. Above this, I believe the movie also offers us a solution to get along with this, illustrated throughout the movie by Sidney's attitude: don't become too serious about yourself or about anybody else ... \"even saints were people in the beginning\" ... as Sophie once says in the movie. The saints of the moment are the stars. We attribute them an 'aura' of perfection, of eternal happiness, but the reality is much less than that. Even the saints of any religion are images, ideal models of how to behave and how to live your life. Even they were not for real ... they became 'for real' after they died and we looked back at them. And that's the catch: we need our saints! we need our stars! We strive for them as if it wasn't for them we wouldn't have anything to strive for. And television and all other media are means to create and capture our strivings. We desperately need benchmarks in regard to which to measure ourselves. And that's how we got in the cinema to watch this movie in the first place: to see if we can fit the benchmark, or if the benchmark is to small for us. This time it was larger than we expected. \n",
+ "\n",
+ "\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "2IEb5wL9Qudr"
+ },
+ "source": [
+ "### Train Loop Examples\n",
+ "\n",
+ "Now let's look at a model training loop woudl look like. I printed the first 10 batches list of examples lengths to show how nicely they are grouped throughout the dataset!"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "5oCIpdJCQwqZ",
+ "outputId": "bf96e155-8c3a-4c62-a85c-db973cd2b044",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ }
+ },
+ "source": [
+ "# Example of number of epochs\n",
+ "epochs = 1\n",
+ "\n",
+ "# Example of loop through each epoch\n",
+ "for epoch in range(epochs):\n",
+ "\n",
+ " # Create batches - needs to be called before each loop.\n",
+ " torchtext_train_dataloader.create_batches()\n",
+ "\n",
+ " # Loop through BucketIterator.\n",
+ " for sample_id, batch in enumerate(torchtext_train_dataloader.batches):\n",
+ " print('Batch exmaples lengths: %s'.ljust(20) % str([len(example['text']) for example in batch]))\n",
+ "\n",
+ " # Let's break early, you get the idea.\n",
+ " if sample_id == 10:\n",
+ " break"
+ ],
+ "execution_count": 63,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "Batch exmaples lengths: [3208, 3222, 3230, 3294, 3302, 3351, 3359, 3366, 3382, 3417]\n",
+ "Batch exmaples lengths: [731, 733, 734, 734, 735, 735, 736, 736, 736, 737]\n",
+ "Batch exmaples lengths: [780, 781, 781, 783, 783, 783, 783, 783, 784, 785]\n",
+ "Batch exmaples lengths: [1036, 1036, 1037, 1038, 1039, 1040, 1045, 1050, 1051, 1052]\n",
+ "Batch exmaples lengths: [994, 994, 995, 996, 997, 1000, 1001, 1005, 1005, 1006]\n",
+ "Batch exmaples lengths: [930, 930, 932, 933, 945, 950, 951, 951, 953, 955]\n",
+ "Batch exmaples lengths: [1334, 1335, 1338, 1338, 1341, 1341, 1343, 1352, 1353, 1355]\n",
+ "Batch exmaples lengths: [2839, 2840, 2853, 2877, 2887, 2892, 2901, 2915, 2930, 2936]\n",
+ "Batch exmaples lengths: [289, 292, 295, 295, 297, 301, 304, 308, 309, 310]\n",
+ "Batch exmaples lengths: [513, 517, 518, 526, 526, 527, 528, 529, 534, 535]\n",
+ "Batch exmaples lengths: [232, 239, 240, 240, 243, 244, 246, 248, 255, 257]\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "POrx0_EuImpM"
+ },
+ "source": [
+ "## Using PyTorchText TabularDataset\n",
+ "\n",
+ "Now I will use the TabularDataset functionality which creates out PyTorchDataset straing from local files. \n",
+ "\n",
+ "We don't need to create a custom PyTorch Dataset class to load our dataset as long as we have tabular files of our data."
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "SGNgKXkq5X8f"
+ },
+ "source": [
+ "### Data to Files\n",
+ "\n",
+ "Since our dataset is scattered in multiple files, I created a function `files_to_tsv` which puts our dataset into a `.tsv` file (Tab-Separated Values)\n",
+ "\n",
+ "I've created the **file_tsv** function in order to concatenate all text files into a single `.tsv` file.\n",
+ "\n",
+ "Since I'll use the **TabularDataset** from `pytorch.data` I need to pass tabular format file.\n",
+ "\n",
+ "For text data I find the Tab Separated Values format easier to deal with.\n",
+ "\n",
+ "I will call the **files_to_tsv** function for each of the two partitions **train** and **test**. \n",
+ "\n",
+ "The function will return the name of the `.tsv` file saved so we can use it later in PyTorchText."
]
},
{
"cell_type": "code",
"metadata": {
"id": "sn8cBvTx0NLr",
- "colab_type": "code",
+ "outputId": "1b559e65-6264-421e-855d-5de2ab8ab8dd",
"colab": {
"base_uri": "https://localhost:8080/",
- "height": 146,
+ "height": 247,
"referenced_widgets": [
- "4b0c2f038a0d49f4b50ef41121787e67",
- "ddc16dcb642d4f6887dadb4133a35218",
- "048b7c2faaac41c9bb84097dad3522f5",
- "46416a2f4968461fb1659fe5bc1deda6",
- "5d97afc63ee644d498651fb937d7e724",
- "0feed71a59cc487383ee28d68fa44913",
- "bfb3a3d1264e4dd086f0a5e276ce25cb",
- "0017ead47fe9489dbb1569732e811d37",
- "1f2e5e5663ed44a9940f27328e4d51de",
- "e082f6515fb1430da7a8f0f416120f1a",
- "d079c5d500a84d7c8871fd333c0ea4cd",
- "c44d83db9f1b45cca5f8310f17d5a126",
- "c28d141d1bd64acdba79ebcb19ea2b0d",
- "39cda08266884d86ab436abb394f7284",
- "ef7096f4b2894dfcbd8e84ae10b8008c",
- "da3c41fdd92348e39d83e778dfc847e0"
+ "da09e310129e4851a616f4bf5e964fbf",
+ "ae72e62369234928a7dd35bd6a3b5ad8",
+ "8f43d50717784a4e8d94ad781c5ed347",
+ "9f0048d3c1234194a119cde8346a16a7",
+ "e9b15e66116248ddb39e1bde4db47715",
+ "dcc77a039e624a13bd0d338f8745cf73",
+ "1face3ee08304f399779db8848234ddf",
+ "fa609bb4d84742e1916d4edf038a4360",
+ "fce2d470c5904ea0af824659d3a62be1",
+ "ba0eacfa389942c0be0682e0e2ec1a1a",
+ "53f7affef6a0470eb1a00ab92b7f32d6",
+ "4da5c0b42df44d98bd49caee01a4b13a",
+ "e547697e00e345b69e4f115f262859ee",
+ "74e8ef5137d44038b4d7fc3b2b374a9d",
+ "08992322ed48407b98dead3c437508fd",
+ "aa74337e6df7456689b806fec4a93f3d",
+ "759cba0a404d489dbb184e4fb200c280",
+ "0bbbf8dea24b4db99502ff03fe4deca8",
+ "7516526db49f4a8dade530a88d56a451",
+ "95e2edba30344f5b90c822ec500e91a0",
+ "10357831f4ef483d8bebedd302994dba",
+ "5f9e91ed3df64991b018121c6990b343",
+ "d54e99ab3a0849fb984f6d20f3113e5d",
+ "b7036eb8d6e84347badd2a871b83d8f0",
+ "20b455acca3c4a5295ee96677cad8a21",
+ "6d1902fcd23b42a4b1ed9ed245925649",
+ "8708969a67674267b8081e446380197d",
+ "1ec07bd0793e4ff2a579b6b2be4274b8",
+ "59ee69d5f72f473ebffabf3f5b520544",
+ "097800a7c8864a64927d0302c58ec47b",
+ "b4e9b60afb0047a8aba77c36236ae783",
+ "39270b7674c042b9b51499492b30a915"
]
- },
- "outputId": "39643b14-fe83-4822-97dc-67e0797f9587"
+ }
},
"source": [
- "# path where to save tsv file\n",
+ "def files_to_tsv(partition_path, save_path='./'):\n",
+ " \"\"\"Parse each file in partition and keep track of sentiments.\n",
+ " Create a list of paris [tag, text]\n",
+ "\n",
+ " Arguments:\n",
+ "\n",
+ " partition_path (:obj:`str`):\n",
+ " Partition used: train or test.\n",
+ "\n",
+ " save_path (:obj:`str`):\n",
+ " Path where to save the final .tsv file.\n",
+ "\n",
+ " Returns:\n",
+ "\n",
+ " :obj:`str`: Filename of created .tsv file.\n",
+ "\n",
+ " \"\"\"\n",
+ "\n",
+ " # List of all examples in format [tag, text].\n",
+ " examples = []\n",
+ "\n",
+ " # Print partition.\n",
+ " print(partition_path)\n",
+ "\n",
+ " # Loop through each sentiment.\n",
+ " for sentiment in ['pos', 'neg']:\n",
+ "\n",
+ " # Find path for sentiment.\n",
+ " sentiment_path = os.path.join(partition_path, sentiment)\n",
+ "\n",
+ " # Get all files from path sentiment.\n",
+ " files_names = os.listdir(sentiment_path)\n",
+ "\n",
+ " # For each file in path sentiment.\n",
+ " for file_name in tqdm(files_names, desc=f'{sentiment} Files'):\n",
+ "\n",
+ " # Get file content.\n",
+ " file_content = io.open(os.path.join(sentiment_path, file_name), mode='r', encoding='utf-8').read()\n",
+ "\n",
+ " # Fix any format errors.\n",
+ " file_content = fix_text(file_content)\n",
+ "\n",
+ " # Append sentiment and file content.\n",
+ " examples.append([sentiment, file_content])\n",
+ "\n",
+ " # Create a TSV file with same format `sentiment text`.\n",
+ " examples = [\"%s\\t%s\"%(example[0], example[1]) for example in examples]\n",
+ "\n",
+ " # Create file name.\n",
+ " tsv_filename = os.path.basename(partition_path) + '_pos_neg_%d.tsv'%len(examples)\n",
+ "\n",
+ " # Write to TSV file.\n",
+ " io.open(os.path.join(save_path, tsv_filename), mode='w', encoding='utf-8').write('\\n'.join(examples))\n",
+ "\n",
+ " # Return TSV file name.\n",
+ " return tsv_filename\n",
+ " \n",
+ "\n",
+ "# Path where to save tsv file.\n",
"data_path = '/content'\n",
"\n",
- "# convert train files to tsv file \n",
- "train_filename = file_tsv(partition_path='/content/aclImdb/train', save_path=data_path)\n",
+ "# Convert train files to tsv file.\n",
+ "train_filename = files_to_tsv(partition_path='/content/aclImdb/train', save_path=data_path)\n",
"\n",
- "# convert test files to tsv file\n",
- "test_filename = file_tsv(partition_path='/content/aclImdb/test', save_path=data_path)"
+ "# Convert test files to tsv file.\n",
+ "test_filename = files_to_tsv(partition_path='/content/aclImdb/test', save_path=data_path)"
],
- "execution_count": 6,
+ "execution_count": 64,
"outputs": [
{
"output_type": "stream",
@@ -821,12 +2930,35 @@
"output_type": "display_data",
"data": {
"application/vnd.jupyter.widget-view+json": {
- "model_id": "4b0c2f038a0d49f4b50ef41121787e67",
+ "model_id": "da09e310129e4851a616f4bf5e964fbf",
"version_minor": 0,
"version_major": 2
},
"text/plain": [
- "HBox(children=(FloatProgress(value=0.0, max=2.0), HTML(value='')))"
+ "HBox(children=(FloatProgress(value=0.0, description='pos Files', max=12500.0, style=ProgressStyle(description_…"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ }
+ },
+ {
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ],
+ "name": "stdout"
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "fce2d470c5904ea0af824659d3a62be1",
+ "version_minor": 0,
+ "version_major": 2
+ },
+ "text/plain": [
+ "HBox(children=(FloatProgress(value=0.0, description='neg Files', max=12500.0, style=ProgressStyle(description_…"
]
},
"metadata": {
@@ -845,12 +2977,35 @@
"output_type": "display_data",
"data": {
"application/vnd.jupyter.widget-view+json": {
- "model_id": "1f2e5e5663ed44a9940f27328e4d51de",
+ "model_id": "759cba0a404d489dbb184e4fb200c280",
"version_minor": 0,
"version_major": 2
},
"text/plain": [
- "HBox(children=(FloatProgress(value=0.0, max=2.0), HTML(value='')))"
+ "HBox(children=(FloatProgress(value=0.0, description='pos Files', max=12500.0, style=ProgressStyle(description_…"
+ ]
+ },
+ "metadata": {
+ "tags": []
+ }
+ },
+ {
+ "output_type": "stream",
+ "text": [
+ "\n"
+ ],
+ "name": "stdout"
+ },
+ {
+ "output_type": "display_data",
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "20b455acca3c4a5295ee96677cad8a21",
+ "version_minor": 0,
+ "version_major": 2
+ },
+ "text/plain": [
+ "HBox(children=(FloatProgress(value=0.0, description='neg Files', max=12500.0, style=ProgressStyle(description_…"
]
},
"metadata": {
@@ -869,29 +3024,18 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "JueOpoTP6NFQ",
- "colab_type": "text"
+ "id": "l8rfZyyp4tGR"
},
"source": [
- "# PyTorchText Setup\n",
- "\n",
- "Here I will setup the dataset to be processed by PyTrochText. I will try to add as many useful comments as possible to make the code very easy to adapt to other projects."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "l8rfZyyp4tGR",
- "colab_type": "text"
- },
- "source": [
- "## Setup data fields\n",
+ "### TabularDataset\n",
"\n",
"Here I setup data fields for PyTorchText. We have to tell the library how to handle each column of the `.tsv` file. For this we need to create `data.Field` objects for each column.\n",
"\n",
- "The tokenizer for text column is a simple split on white-space tokenizer. Depending on the project, it can be changed to any tokenizer. It needs to take as input text and output a list.\n",
+ "`text_tokenizer`: \n",
+ "For this example I don't use an actual tokenizer for the `text` column but I need to create one because it requires as input. Depending on the project, here is where you will have your own tokenizer. It needs to take as input text and output a list.\n",
"\n",
- "The label tokenizer is not actually a tokenizer. It just encodes the **pos**into **1** and **neg** into **0**.\n",
+ "`label_tokenizer`\n",
+ "The label tokenizer is also a dummy tokenizer. This is where you will have a encoder to transform labels to ids.\n",
"\n",
"Since we have two `.tsv` files it's great that we can use the `.split` function from **TabularDataset** to handle two files at the same time one for train and the other one for test.\n",
"\n",
@@ -901,149 +3045,213 @@
{
"cell_type": "code",
"metadata": {
- "id": "dhaX2V5N3Y4V",
- "colab_type": "code",
- "colab": {}
+ "id": "dhaX2V5N3Y4V"
},
"source": [
- "# text tokenizer function - simple white-space split\n",
- "text_tokenizer = lambda x : x.split()\n",
- "# label tokenizer - encode labels to int 0:negative and 1:positive\n",
- "label_tokenizer = lambda x: 0 if x=='neg' else 1\n",
+ "# Text tokenizer function - dummy tokenizer to return same text.\n",
+ "# Here you will use your own tokenizer.\n",
+ "text_tokenizer = lambda x : x\n",
"\n",
- "# data fiels for text column - invoke tokenizer\n",
- "TEXT = data.Field(sequential=True, tokenize=text_tokenizer, lower=False)\n",
- "# data field for labels - invoke tokenize label encoder\n",
- "LABEL = data.Field(sequential=True, tokenize=label_tokenizer, use_vocab=False)\n",
+ "# Label tokenizer - dummy label encoder that returns same label.\n",
+ "# Here you will add your own label encoder.\n",
+ "label_tokenizer = lambda x: x\n",
"\n",
- "# create data fields at tuples of description variable and data fiels\n",
+ "# Data fiels for text column - invoke tokenizer.\n",
+ "TEXT = torchtext.data.Field(sequential=True, tokenize=text_tokenizer, lower=False)\n",
+ "\n",
+ "# Data field for labels - invoke tokenize label encoder.\n",
+ "LABEL = torchtext.data.Field(sequential=True, tokenize=label_tokenizer, use_vocab=False)\n",
+ "\n",
+ "# Create data fields at tuples of description variable and data fiels.\n",
"datafields = [(\"label\", LABEL),\n",
" (\"text\", TEXT)]\n",
"\n",
- "# since we have have tab separated data we use TabularDataset\n",
- "train_split, test_split = data.TabularDataset.splits(\n",
- " path=data_path, # path to data\n",
- " train=train_filename, \n",
+ "# Since we have have tab separated data we use TabularDataset\n",
+ "train_dataset, valid_dataset = torchtext.data.TabularDataset.splits(\n",
+ " \n",
+ " # Path to train and validation.\n",
+ " path=data_path,\n",
+ "\n",
+ " # Train data filename.\n",
+ " train=train_filename,\n",
+ "\n",
+ " # Validation file name.\n",
" validation=test_filename,\n",
+ "\n",
+ " # Format of loval files.\n",
" format='tsv',\n",
- " skip_header=False, # important\n",
+ "\n",
+ " # Check if we have header.\n",
+ " skip_header=False,\n",
+ "\n",
+ " # How to handle fields.\n",
" fields=datafields)"
],
- "execution_count": 7,
+ "execution_count": 68,
"outputs": []
},
{
"cell_type": "markdown",
"metadata": {
- "id": "fsmI67Ly4zkc",
- "colab_type": "text"
+ "id": "fsmI67Ly4zkc"
},
"source": [
- "## Bucket Iterator\n",
+ "### PyTorchText Bucket Iterator Dataloader\n",
"\n",
- "Here is where the magic happens! We pass in the **train_split** and **test_split** TabularDatasets splits into **BucketIterator** to create the actual batches.\n",
- "\n",
- "It's very nice that pytorchtext can handle splits! We need to tell the BucketIterator the batch size for both our splits.\n",
- "\n",
- "The **sort_key** parameter is very important. It is used to order text sequences in batches. Since we want to batch sequence of text with similar length, we will use a simple function that returns the length of our text (`len(x.text)`).\n",
- "\n",
- "It is important to keep `sort=False` and `sort_with_batch=True` to sort the batches only and not the whole dataset.\n",
- "\n",
- "Find more details in the pytorchtext **BucketIterator** documentation [here](https://torchtext.readthedocs.io/en/latest/data.html#bucketiterator) - look at the **BPTTIterator** because it has same parameters except the **bptt_len** argument."
+ "I'm using same setup as in the **PyTorchText Bucket Iterator Dataloader** code cell section. The only difference is in the `sort_key` since there is adifferent way to access an example attributes (before we had dictionary format)."
]
},
{
"cell_type": "code",
"metadata": {
- "id": "B5LjIaiy7XcQ",
- "colab_type": "code",
- "colab": {}
- },
- "source": [
- "# batch size used for train\n",
- "train_batch_size = 10\n",
- "\n",
- "# batch size used for test\n",
- "test_batch_size = 20\n",
- "\n",
- "# bucket similar length text sequences together to create batches\n",
- "train_iterator, val_iterator = data.BucketIterator.splits(\n",
- " (train_split, test_split), # datasets for iterator to draw data from\n",
- " batch_sizes=(train_batch_size, test_batch_size),\n",
- " device='cpu', # if we want to load batches on specific device\n",
- " sort_key=lambda x: len(x.text), # what function should use to group batches\n",
- " repeat=True, # repeat the iterator for multiple epochs(DON'T TRUST)\n",
- " sort=False, # avoid sorting all data using sort_key\n",
- " shuffle=True, # if data needs to be shuffled each time batches are created\n",
- " sort_within_batch=True # only sort each batch using sort_key (better to use)\n",
- " )"
- ],
- "execution_count": 8,
- "outputs": []
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "id": "h2bJtSlW42Fo",
- "colab_type": "text"
- },
- "source": [
- "## Sample batch\n",
- "\n",
- "Now let's see how a batch looks like!\n",
- "\n",
- "The print format is: `label sequence_length tokenized_text`.\n",
- "\n",
- "We see the labels as **0** and **1** values along with the length of tokens for that text sequence and along with the list of tokens from that sequence.\n",
- "\n",
- "It looks like the lengths of sequences for this batches are very close together! This actually works!\n",
- "\n",
- "## Note:\n",
- "I would call `.create_batches()` after each epoch. The `repeat=True` in **BucketIterator** should allow more epochs to run but I don't trust it!"
- ]
- },
- {
- "cell_type": "code",
- "metadata": {
- "id": "wN63h48c7fRy",
- "colab_type": "code",
+ "id": "50Uno-i7Pwor",
+ "outputId": "7cfe4a00-16e0-4ad6-bdb2-f4e7905e7c78",
"colab": {
- "base_uri": "https://localhost:8080/",
- "height": 248
- },
- "outputId": "77e530f8-5dd5-4d45-9f0f-38ded5b39afc"
+ "base_uri": "https://localhost:8080/"
+ }
},
"source": [
- "# create batches - needs to be called after each epoch\n",
- "train_iterator.create_batches()\n",
+ "# Group similar length text sequences together in batches.\n",
+ "torchtext_train_dataloader, torchtext_valid_dataloader = torchtext.data.BucketIterator.splits(\n",
+ " \n",
+ " # Datasets for iterator to draw data from\n",
+ " (train_dataset, valid_dataset),\n",
"\n",
- "# loop through each batch\n",
- "for batch in train_iterator.batches:\n",
- " print(len(batch))\n",
+ " # Tuple of train and validation batch sizes.\n",
+ " batch_sizes=(train_batch_size, valid_batch_size),\n",
"\n",
- " # print each example\n",
- " for example in batch:\n",
- " print(example.label, len(example.text), example.text)\n",
- " print('\\n')\n",
- " \n",
- " break"
+ " # Device to load batches on.\n",
+ " device=device, \n",
+ "\n",
+ " # Function to use for sorting examples.\n",
+ " sort_key=lambda x: len(x.text),\n",
+ "\n",
+ "\n",
+ " # Repeat the iterator for multiple epochs.\n",
+ " repeat=True, \n",
+ "\n",
+ " # Sort all examples in data using `sort_key`.\n",
+ " sort=False, \n",
+ "\n",
+ " # Shuffle data on each epoch run.\n",
+ " shuffle=True,\n",
+ "\n",
+ " # Use `sort_key` to sort examples in each batch.\n",
+ " sort_within_batch=True,\n",
+ " )\n",
+ "\n",
+ "# Print number of batches in each split.\n",
+ "print('Created `torchtext_train_dataloader` with %d batches!'%len(torchtext_train_dataloader))\n",
+ "print('Created `torchtext_valid_dataloader` with %d batches!'%len(torchtext_valid_dataloader))"
],
- "execution_count": 9,
+ "execution_count": 69,
"outputs": [
{
"output_type": "stream",
"text": [
- "10\n",
- "0 212 ['Sometimes', 'a', 'premise', 'starts', 'out', 'good,', 'but', 'because', 'of', 'the', 'demands', 'of', 'having', 'to', 'go', 'overboard', 'to', 'meet', 'the', 'demands', 'of', 'an', 'audience', 'suffering', 'from', 'attention-deficit', 'disorder,', 'it', 'devolves', 'into', 'an', 'incongruous', 'mess.', 'And', 'for', 'three', 'well-respected', 'actors', 'who', 'have', 'made', 'better', 'work', 'before', 'and', 'after,', 'this', 'is', 'a', 'mortal', 'shame.
So', \"let's\", 'see.', 'Premise:', 'a', 'loving', 'couple', 'who', 'lives', 'in', 'a', 'beautiful', 'home', 'is', 'threatened', 'by', 'a', 'bad', 'cop.', 'Interesting', 'to', 'say', 'the', 'least.', 'Make', 'the', 'encroaching', 'cop', 'a', 'little', 'disturbing,', 'why', 'not.', 'It', 'was', 'well', 'done', 'in', 'THE', 'HAND', 'WHO', 'ROCKS', 'THE', 'CRADLE', 'and', 'SINGLE', 'WHITE', 'FEMALE,', 'and', \"it's\", 'a', 'proved', 'ticket', 'to', 'a', 'successful', 'thriller.
Now', 'herein', 'lies', 'the', 'dilemma.', 'Create', 'a', 'disturbing', 'story', 'that', 'actually', 'bothers', 'to', 'bring', 'some', 'true', 'menace', 'into', 'its', 'main', 'characters', 'while', 'never', 'going', 'so', 'far', 'as', 'to', 'look', 'ridiculous,', 'or', 'throw', 'any', 'semblance', 'to', 'reality,', 'amp', 'up', 'the', 'shock', 'factor,', 'and', 'make', 'this', 'cop', 'so', 'extreme', '--', 'an', 'ultra', 'bad', 'variation', 'of', 'every', 'other', 'super-villain', \"that's\", 'hit', 'cinemas', 'since', 'the', 'silent', 'age.
The', 'producers,', 'and', 'directors,', 'chose', 'the', 'latter.', 'Thus', 'is', 'the', 'resulting', 'film', '--', 'badly', 'made,', 'with', 'actors', 'trying', 'their', 'darnedest', 'to', 'make', 'heads', 'or', 'tails', 'in', 'roles', 'that', \"they've\", 'essayed', 'before,', 'and', 'nothing', 'much', 'amounting', 'to', 'even', 'less.']\n",
- "1 212 [\"I've\", 'loved', 'all', 'of', \"Cream's\", 'work,', 'even', 'as', 'there', 'is', 'such', 'a', 'small', 'and', 'precious', 'catalog', 'of', 'work', 'to', 'take', 'hold.', 'Even', 'when', 'they', 'go', 'for', 'as', 'long', 'as', 'twenty', 'minutes', 'with', 'some', 'of', 'their', 'songs', '(Spoonful', 'and', 'Toad', 'off', 'of', 'Wheels', 'of', 'Fire', 'are', 'prime', 'examples)', 'still', 'rock', 'the', 'socks', 'off', 'of', 'more', 'than', 'half', 'of', 'any', 'given', 'rock', 'act', 'working', 'today.', 'This', 'power', 'to', 'gel', 'on', 'stage', 'is', 'given', 'one', 'of', 'the', 'most', 'anticipate', 'rock', 'band', 'reunions', 'ever', 'with', 'their', 'Royal', 'Albert', 'Hall', 'shows', 'last', 'year.', 'They', 'may', 'have', 'gotten', 'older,', 'as', 'have', 'their', 'fans,', 'but', 'the', 'energy', 'is', 'still', 'there,', 'with', 'the', 'great', 'arrangements', 'of', 'classic', 'blues', 'songs', 'as', 'well', 'as', 'their', 'own.', 'The', 'renditions', 'of', 'White', 'Room,', 'Badge,', 'Politician,', 'Spoonful,', 'Sunshine', 'of', 'Your', 'Love,', 'not', 'one', 'seems', 'to', 'miss', 'a', 'beat.', \"Clapton's\", 'solos', 'have', 'a', 'formation', 'that', 'he', 'sometimes', \"doesn't\", 'have', 'when', 'on', 'stage', 'with', 'his', 'solo', 'band.', 'Ginger', 'Baker,', 'enough', 'said.', 'Jack', 'Bruce', 'is', 'sturdy', 'enough', 'with', 'his', 'vocals', 'still', 'with', 'a', 'kind', 'of', 'power', 'that', 'Clapton', 'could', 'never', 'get', 'on', 'his', 'own.', 'Bottom', 'line,', 'if', 'you', 'want', 'to', 'see', 'what', 'were', 'the', 'best', 'shows', 'you', 'wish', 'you', 'had', 'seen', 'last', 'year', '(well,', 'some', 'may', 'have', 'seen', 'them),', \"it's\", 'all', 'on', 'this', 'DVD,', 'with', 'cool', 'special', 'features.']\n",
- "0 213 ['Apparently', 'most', 'viewer', 'knows', 'nothing', 'about', 'the', 'history', 'of', 'Europe,', 'including', 'Germany,', 'Hungary', 'and', 'the', 'whole', 'Central', 'and', 'Eastern', 'Europe', 'as', 'well', 'as', 'the', 'Hitler', 'and', 'Stalin', 'Era.', 'Nuremberg', '(and', 'a', 'lot', 'of', 'forgotten', 'trials', 'all', 'over', 'Europe)', 'was', 'a', 'revenge', 'and', 'injustice', 'of', 'the', 'winners.', 'What', 'do', 'you', 'think,', 'why', 'were', 'not', 'any', 'American,', 'British,', 'French', 'or', 'Soviet', 'defendants', 'after', 'the', 'WWII?', 'There', 'were', 'no', 'American,', 'British', 'etc.', 'war', 'crimes?', 'There', 'were', 'no', 'Hiroshima,', 'no', 'Nagasaki,', 'no', 'Tokyo,', 'no', 'Dresden,', 'no', 'Hamburg,', 'no', 'Berlin,', 'no', 'Katyn', 'and', 'so', 'on?', 'The', 'Germans', 'had', 'war', 'crimes', 'too,', 'but', 'in', 'Nuremberg', 'the', 'justice', 'was', 'not', 'a', 'real', 'consideration.', 'The', 'main', 'point', 'was:', 'Vae', 'victis!', 'Germany', 'must', 'perish!', '(That', 'was', 'also', 'a', 'book', 'title', 'in', 'America,', '1941.)
This', 'film', 'is', 'an', 'awful,', 'ignoble', 'American', 'brainwashing', 'instrument,', 'full', 'of', 'error,', 'lie,', 'propaganda,', 'prejudice', 'and', 'injustice.', 'And', 'first', 'of', 'all:', 'full', 'of', 'hypocrisy.', 'But', 'not', 'surprisingly...', 'Why', \"wasn't\", 'enough', 'the', 'Nuremberg', 'process', 'itself?', 'This', 'film', 'is', 'a', 'nightmare.', 'Total', 'darkness', 'after', '60', 'years!', 'This', 'darkness', '(and', 'hate', 'and', 'narcissism', 'and', 'lack', 'of', 'self-criticism)', 'is', 'the', 'real', 'cause', 'of', 'the', 'massacres', 'in', 'Korea,', 'Vietnam,', 'Cambodia,', 'Serbia,', 'Iraq', 'and', 'so', 'forth.', 'And', 'there', 'are', 'no', 'American', 'war', 'criminals...', 'Bravo,', 'America!', 'Very', 'clever.', 'Even', 'Stalin', 'would', 'become', 'envious', 'of', 'it...']\n",
- "0 214 ['This', 'movie', 'had', 'the', 'potential', 'to', 'be', 'really', 'good,', 'considering', 'some', 'of', 'the', 'plot', 'elements', 'are', 'borrowed', 'from', 'the', 'sci-fi', 'actioner', 'THE', 'HIDDEN.', 'And', 'Dolph', 'always', 'lends', 'some', 'cheesy', 'appeal', 'to', 'his', 'roles.', 'But', 'someone', 'somewhere', 'really', 'dropped', 'the', 'ball', 'on', 'this', 'one.
Dolph', 'plays', 'a', 'butt-kicking', 'monk', '(!)', 'who', 'travels', 'to', 'New', 'York', 'to', 'retrieve', 'a', 'key', 'that', 'unlocks', 'a', 'door', 'beneath', 'his', 'monastery', 'that', 'has', 'imprisoned', 'the', 'antichrist', 'for', '2000', 'years.', 'He', 'must', 'battle', 'the', 'minion,', 'who', 'is', 'a', 'spirit', 'that', 'jumps', 'from', 'body', 'to', 'body', 'much', 'like', 'THE', 'HIDDEN', 'and', 'JASON', 'GOES', 'TO', 'HELL.', 'The', 'minion,', 'naturally,', 'wants', 'the', 'key', 'so', 'it', 'can', 'let', 'the', 'antichrist', 'out.', 'Along', 'for', 'the', 'ride', 'is', 'an', 'annoying', 'female', 'archaeologist', 'and', 'together', 'she', 'and', 'Dolph', 'are', 'chased', 'by', 'the', 'minion-possessed', 'bodies.
If', \"I'm\", 'making', 'this', 'sound', 'entertaining,', 'forget', 'it.', 'The', 'pacing', 'is', 'very', 'awkward', 'and', 'sluggish,', 'the', 'acting', 'subpar', 'at', 'best,', 'and', 'the', 'fight', 'scenes', 'staged', 'poorly.', 'Dolph', 'sleepwalks', 'through', 'his', 'role', 'and', 'spouts', 'some', 'of', 'the', 'worst', 'dialogue', 'of', 'his', 'career.
The', 'cheese', 'factor', 'really', 'picks', 'up', 'at', 'the', 'end', 'when', 'the', 'minion', 'battles', 'an', 'army', 'of', 'machine-gun', 'wielding', 'monks', 'at', 'the', 'monastery,', 'but', 'the', 'rest', 'of', 'this', 'flick', 'is', 'a', 'snoozefest.
Too', 'bad,', 'I', 'really', 'wanted', 'to', 'like', 'this.']\n",
- "1 215 ['This', 'is', 'the', 'Neil', 'Simon', 'piece', 'of', 'work', 'that', 'got', 'a', 'lot', 'of', 'praises!', '\"The', 'Odd', 'Couple\"', 'is', 'a', 'one', 'of', 'a', 'kind', 'gem', 'that', 'lingers', 'within.', 'You', 'got', 'Felix', 'Ungar(Jack', 'Lemmon);', 'a', 'hypochondriac,', 'fussy', 'neat-freak,', 'and', 'a', 'big', 'thorn', 'in', 'the', 'side', 'of', 'his', 'roommate,', 'Oscar', 'Madison(Walter', 'Matthau);', 'a', 'total', 'slob.', 'These', 'men', 'have', 'great', 'jobs', 'though.', 'Felix', 'is', 'a', 'news', 'writer,', 'and', 'Oscar', 'is', 'a', 'sports', 'writer.', 'Both', 'of', 'these', 'men', 'are', 'divorced,', \"Felix's\", 'wife', 'is', 'nearby,', 'while', \"Oscar's\", 'is', 'on', 'the', 'other', 'side', 'of', 'the', 'U.S.', '(The', 'West', 'Coast).', 'Well,', 'what', 'can', 'you', 'say?', 'Two', 'men', 'living', 'in', 'one', 'roof', 'together', 'without', 'driving', 'each', 'other', 'crazy,', 'is', 'impossible', 'as', 'well', 'as', 'improbable.', \"It's\", 'a', 'whole', 'lot', 'of', 'laughs', 'and', 'a', 'whole', 'lot', 'of', 'fun.', 'I', 'liked', 'the', 'part', 'where', 'when', 'those', 'two', 'British', 'neighbors', 'that', 'speak', 'to', 'both', 'gentlemen,', 'and', 'after', 'Oscar', 'kicked', 'out', 'Felix,', 'he', 'gets', 'lucky', 'and', 'lives', 'with', 'them', 'when', 'he', 'refused', 'to', 'have', 'dinner', 'with', 'them', 'the', 'night', 'earlier.', \"It's\", 'about', 'time', 'that', 'Felix', 'needed', 'to', 'lighten', 'up.', 'I', 'guess', 'all', 'neat-freaks', 'neat', 'to', 'lighten', 'up.', 'They', 'can', 'be', 'fussy,', 'yet', 'they', 'should', 'be', 'patient', 'as', 'well.', 'A', 'very', 'fun', 'movie,', 'and', 'a', 'nuevo', 'classic.', 'Neil', \"Simon's\", '\"The', 'Odd', 'Couple\"', 'is', 'a', 'must', 'see', 'classic', 'movie.', '5', 'STARS!']\n",
- "0 215 ['Hanna-Barbera', 'sucks', 'the', 'life', 'out', 'of', 'another', 'famous', 'property.', 'The', 'violence', 'is', 'watered', 'down,', 'the', 'stories', 'are', 'formulaic,', 'the', 'animation', 'is', 'bad,', 'the', 'music', 'is', 'obnoxious', 'and', 'repetitive,', 'and', 'frankly,', 'the', 'show', 'just', \"isn't\", 'funny.
At', 'the', 'time,', 'H-B', 'put', 'every', 'one', 'of', 'its', 'series', 'through', 'the', 'same', 'clichéd', 'situations,', 'regardless', 'if', 'it', 'fit', 'the', 'world', 'of', 'the', 'cartoon', 'or', 'not.', 'Thus,', 'Popeye', 'and', 'Bluto', 'appear', 'in', 'a', 'recurring', 'segment', 'as', 'cavemen', '(\"Hey!', 'Popeye', 'is', 'popular,', 'and', 'the', 'Flinstones', 'are', 'popular.', 'Put', \"'em\", 'together,', 'and', 'you', \"can't\", 'miss!\").', 'Also,', 'in', 'an', 'apparent', 'ripoff', 'of', '\"Private', 'Benjamin,\"', 'Olive', 'Oyl', 'and', 'the', 'Goon', 'have', 'a', 'regular', 'segment', 'that', 'features', 'them', 'as', 'new', 'army', 'recruits.', 'Seriously!', 'Why?', '
Adding', 'to', 'the', 'annoyance', 'factor', 'are', 'the', 'public', 'service', 'announcements', 'in', 'every', 'episode', '(standard', 'practice', 'at', 'the', 'time', 'for', 'cartoons,', 'but', 'still', 'annoying).', 'Popeye', 'lectures', 'his', 'nephews', 'on', 'crossing', 'the', 'street', 'safely,', 'recycling,', 'and', '-', 'are', 'you', 'ready', 'for', 'this?', '-', 'the', 'dangers', 'of', 'smoking!', '(I', 'swear', \"I'm\", 'not', 'making', 'that', 'up.)
The', 'only', 'charm', 'remaining', 'from', 'the', 'original', 'cartoons', 'is', 'that', 'Jack', 'Mercer,', 'the', 'voice', 'of', 'Popeye', 'from', 'the', 'early', 'days,', 'continues', 'the', 'role', 'here.
Worth', 'checking', 'out', 'once', 'just', 'to', 'get', 'a', 'new', 'appreciation', 'for', 'the', 'old', 'Fleischer', 'shorts.', 'Otherwise,', 'avoid', 'at', 'all', 'costs.']\n",
- "1 215 ['I', 'saw', 'this', '25', 'years', 'ago', 'on', 'PBS.', 'It', 'was', 'very', 'difficult', 'to', 'watch.', 'So', 'real.', 'To', 'watch', 'this', 'small', 'family', 'struggle', 'in', 'the', 'winter', 'was', 'heart', 'rending.', 'No', 'time', 'for', 'courting:', 'fate', 'has', 'thrown', 'us', 'together', 'and', 'we', 'put', 'our', 'shoulders', 'to', 'the', 'grindstone', 'and', 'make', 'it', 'work.', 'This', 'was', 'based', 'on', 'the', \"woman's\", 'actual', 'diary,', 'which', 'I', 'read', 'many', 'years', 'later.', 'She', 'said', 'in', 'her', 'diary', 'that', 'her', 'parents', 'died', 'when', 'she', 'was', 'little', 'and', 'all', 'their', 'bothers', 'and', 'sisters', 'had', 'to', 'work', 'the', 'farm', 'to', 'feed', 'themselves.', 'She', 'learned', 'to', 'mow,', 'which', 'was', 'not', 'lady-like.', 'She', 'was', 'afraid', 'that', 'no', 'prince', 'charming', 'would', 'want', 'a', 'woman', 'with', 'sun-browned,', 'calloused', 'hands,', 'but', 'this', 'husband', 'was', 'so', 'happy', 'that', 'his', 'new', 'wife', 'knew', 'how', 'to', 'mow,', 'and', 'she', 'was', 'happy', 'to', 'do', 'it.', 'Both', 'were', 'widowed', 'and', 'together', 'they', 'worked', 'to', 'build', 'a', 'new', 'home.', 'It', 'was', 'so,', 'so', 'sad', 'when', 'the', 'baby', 'died.', 'Of', 'course,', 'if', 'they', 'had', 'it', 'today,', 'I', 'am', 'sure', 'it', 'would', 'have', 'been', 'fine.', 'That', 'only', 'makes', 'the', 'tragedy', 'extra', 'sad.', 'I', 'was', 'crying', 'so', 'hard.', 'But', 'then', 'they', 'went', 'out', 'and', 'successfully', 'pulled', 'out', 'a', 'new', 'calf.', 'Spring', 'is', 'on', 'its', 'way,', 'and', 'life', 'goes', 'on.', 'In', 'her', 'diary,', 'she', 'did', 'have', 'two', 'more', 'boys', 'and', 'they', 'lived.']\n",
- "1 216 ['I', 'was', 'going', 'to', 'bed', 'with', 'my', 'gf', 'last', 'night,', 'and', 'while', 'she', 'was', 'brushing', 'her', 'teeth,', 'I', 'flipped', 'channels', 'until', 'I', 'came', 'across', 'this', 'Chinese', 'movie', 'called', 'the', 'King', 'of', 'Masks.', 'At', 'first', 'I', 'thought', 'it', 'was', 'going', 'to', 'be', 'a', 'Kung', 'Fu', 'movie,', 'so', 'I', 'started', 'watching', 'it,', 'and', 'then', 'it', 'immediately', 'captured', 'me', 'in,', 'and', 'I', 'had', 'to', 'finish', 'it.
The', 'little', 'girl', 'in', 'the', 'movie', 'was', 'absolutely', 'adorble.', 'She', 'was', 'such', 'a', 'great', 'actor', 'for', 'being', 'so', 'little.', 'Maybe', 'the', 'fact', 'it', 'was', 'in', 'Chinese,', 'so', 'the', 'English', 'was', 'dubbed', 'made', 'it', 'harder', 'for', 'me', 'to', 'tell...but', 'she', 'really', 'seemed', 'to', 'be', 'in', 'character', 'perfectly.', 'I', 'felt', 'so', 'bad', 'for', 'the', 'girl', 'as', 'she', 'kept', 'trying', 'to', 'please', 'her', '\"boss\"', 'but', 'everything', 'just', 'turned', 'out', 'rotten.', 'lol.', 'Even', 'when', 'she', 'brings', 'him', 'another', 'grandson,', 'just', 'so', 'he', 'can', 'pass', 'on', 'his', 'art...it', 'turns', 'out', 'that', 'kid', 'was', 'kidnapped,', 'so', 'he', 'gets', 'arrested', 'and', 'has', '5', 'days', 'to', 'live.', 'lol...whatever', 'she', 'touches', 'in', 'an', 'effort', 'to', 'be', 'nice', 'to', 'her', 'grandpa,', 'just', 'backfires.
In', 'the', 'end,', 'he', 'sees', 'how', 'much', 'love', 'is', 'in', 'her', 'and', 'teaches', 'her', 'the', 'art', 'of', 'masks...which', 'is', 'just', 'so', 'heartwarming', 'after', 'all', 'the', 'mishaps', 'in', 'the', 'movie.
Definitely', 'a', 'gem,', 'and', 'totally', 'original.
Scott']\n",
- "1 218 ['Gender', 'Bender', 'the', 'Limerick:
A', 'man', 'or', 'a', 'woman?', 'Who', 'knows?
It', 'turns', 'out', 'that', \"'it'\", 'is', 'both.
Sleeping', 'in', 'clay
Then', 'they', 'all', 'went', 'away
In', 'one', 'of', 'their', 'UFOs.
Gender', 'Bender', 'is', 'another', 'great', 'Season', '1', 'episode.', 'I', 'enjoy', 'this', 'one', 'because', 'the', 'story', 'is', 'the', 'kind', 'where', 'you', 'are', 'never', 'really', 'sure', \"what's\", 'gonna', 'happen', 'next.', 'It', 'is', 'entirely', 'original.', 'The', 'teaser', 'is', 'very', 'fun', 'with', 'the', 'close', 'up', 'of', 'the', 'eye', 'and', 'the', 'reflection', 'of', 'the', 'disco', 'lights.', 'I', 'really', 'need', 'to', 'learn', 'my', 'that', 'thumb', 'trick', 'the', 'genderbender', 'heshe', 'does.', 'I', 'really', 'like', 'the', 'atmosphere', 'at', 'the', \"Kindred's\", 'little', 'village', 'and', 'Mulder', 'and', 'Scully', 'sneaking', 'around', 'in', 'the', 'middle', 'of', 'the', 'night.', 'Its', 'very', 'exciting.', 'This', 'is', 'one', 'of', 'my', 'favorite', 'Season', '1', 'episodes', 'in', 'fact.', 'I', 'think', 'the', 'thing', 'I', 'like', 'about', 'it', 'so', 'much', 'is', 'how', 'they', 'turn', 'out', 'to', 'be', 'aliens', 'in', 'the', 'end', 'and', 'left', 'crop', 'circles.', 'Many', 'people', 'see', 'this', 'as', 'a', 'non-mythology', 'related', 'alien', 'episode', 'kind', 'of', 'like', '\"The', 'Unnatural\"', 'or', '\"Space\"', 'but', 'I', 'think', 'this', 'could', 'easily', 'be', 'seen', 'as', 'mythology', 'related.', 'Maybe', 'the', 'genderbender', 'was', 'just', 'like', 'the', 'alien', 'bounty', 'hunter', 'and', 'could', 'appear', 'to', 'look', 'like', 'anyone.', 'Huh?', 'Anyway', 'I', 'give', 'the', 'episode', 'a', '9', 'out', 'of', '10.']\n",
- "1 219 ['Joseph', 'L.', 'Mankiewicz', 'is', 'not', 'remembered', 'by', 'most', 'today', 'as', 'one', 'of', 'the', 'finest', 'directors', 'in', 'Hollywood', 'history,', 'but', 'this', 'film', 'proves', 'that', 'he', 'is.', 'Already', 'a', 'success', 'by', 'doing', 'sophisticated', 'American', 'dramas', 'such', 'as', 'A', 'Letter', 'to', 'Three', 'Wives', 'and', 'All', 'About', 'Eve', 'as', 'well', 'as', 'successfully', 'adapting', 'Shakespeare', 'to', 'life', 'in', 'Julius', 'Caesar,', 'Mankiewicz', 'does', 'a', 'marvelous', 'job', 'of', 'bringing', 'this', 'hit', 'Broadway', 'play', 'to', 'film', 'and', 'does', 'it', 'with', 'style.', 'Marlon', 'Brando', 'is', 'perfect', 'as', 'Sky', 'Masterson,', 'even', 'if', 'he', \"can't\", 'sing', 'too', 'well.', 'He', 'is', 'the', 'only', 'actor', 'who', 'could', 'pull', 'it', 'off', 'perfectly', 'wit', 'his', 'sheer', 'coolness', 'and', 'clarity.', 'Frank', 'Sinatra', 'is', 'a', 'wonderful', 'singer,', 'as', 'expected,', 'and', 'does', 'a', 'good', 'job', 'of', 'acting', 'as', 'Nathan', 'Detroit.', 'Jean', 'Simmons', 'is', 'also', 'very', 'good', 'as', 'Sarah', 'Brown', 'and', 'her', 'scenes', 'with', 'Brando', 'sizzle', 'with', 'great', 'chemistry.', 'All', 'supporting', 'actors', 'do', 'their', 'part,', 'especially', 'Sheldon', 'Leonard', 'as', 'Harry', 'the', 'Horse', 'in', 'a', 'very', 'funny', 'bit.', 'Still,', 'Mankiewicz', 'should', 'be', 'given', 'most', 'of', 'the', 'credit', 'for', 'bringing', 'a', 'fine', 'musical', 'in', 'its', 'own', 'right', 'to', 'the', 'screen', 'in', 'such', 'a', 'way', 'that', 'it', 'feels', 'authentic', 'in', 'many', 'scenes', 'but', 'is', 'still', 'a', 'story', 'in', 'its', 'own', 'world.', 'All', 'in', 'all,', 'Guys', 'and', 'Dolls', 'is', 'a', 'great', 'musical', 'and', 'works', 'on', 'many', 'levels', 'it', 'normally', 'should', 'not', 'have.']\n",
+ "Created `torchtext_train_dataloader` with 2500 batches!\n",
+ "Created `torchtext_valid_dataloader` with 1250 batches!\n"
+ ],
+ "name": "stdout"
+ }
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "h2bJtSlW42Fo"
+ },
+ "source": [
+ "### Compare DataLoaders\n",
+ "\n",
+ "Let's compare the PyTorch DataLoader batches with the PyTorchText BucketIterator batches created with TabularDataset. We can see how nicely examples of similar length are grouped in same batch with PyTorchText.\n",
+ "\n",
+ "**Note:** *When using the PyTorchText BucketIterator, make sure to call `create_batches()` before looping through each batch! Else you won't get any output form the iterator.*"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "metadata": {
+ "id": "U7oII00Xz7e5",
+ "outputId": "542a3d7e-c809-48b0-a48f-154f4552c672",
+ "colab": {
+ "base_uri": "https://localhost:8080/"
+ }
+ },
+ "source": [
+ "# Loop through regular dataloader.\n",
+ "print('PyTorch DataLoader\\n')\n",
+ "for batch in torch_train_dataloader:\n",
+ " \n",
+ " # Let's check batch size.\n",
+ " print('Batch size: %d\\n'% len(batch['text']))\n",
+ " print('LABEL\\tLENGTH\\tTEXT'.ljust(10))\n",
+ "\n",
+ " # Print each example.\n",
+ " for text, label in zip(batch['text'], batch['label']):\n",
+ " print('%s\\t%d\\t%s'.ljust(10) % (label, len(text), text))\n",
+ " print('\\n')\n",
+ " \n",
+ " # Only look at first batch. Reuse this code in training models.\n",
+ " break\n",
+ " \n",
+ "\n",
+ "# Create batches - needs to be called before each loop.\n",
+ "torchtext_train_dataloader.create_batches()\n",
+ "\n",
+ "# Loop through BucketIterator.\n",
+ "print('PyTorchText BuketIterator\\n')\n",
+ "for batch in torchtext_train_dataloader.batches:\n",
+ "\n",
+ " # Let's check batch size.\n",
+ " print('Batch size: %d\\n'% len(batch))\n",
+ " print('LABEL\\tLENGTH\\tTEXT'.ljust(10))\n",
+ " \n",
+ " # Print each example.\n",
+ " for example in batch:\n",
+ " print('%s\\t%d\\t%s'.ljust(10) % (example.label, len(example.text), example.text))\n",
+ " print('\\n')\n",
+ " \n",
+ " # Only look at first batch. Reuse this code in training models.\n",
+ " break"
+ ],
+ "execution_count": 72,
+ "outputs": [
+ {
+ "output_type": "stream",
+ "text": [
+ "PyTorch DataLoader\n",
+ "\n",
+ "Batch size: 10\n",
+ "\n",
+ "LABEL\tLENGTH\tTEXT\n",
+ "pos\t1770\tHOLLOW MAN is one of the better horror films of the past decade. The sub-plot is original and the main plot is even better. The special effects are brilliant and possibly the best I have ever seen in a horror film. Kevin Bacon proves again that he can handle any role that comes his way.
Claude Rains shocked the world with THE INVISIBLE MAN in 1933, well now, Kevin Bacon has shocked *us* with HOLLOW MAN. One of the most thrilling horror films ever. The action is intense and the chills are true. You may actually find yourself jumping if you are watching it in the dark on a stormy night. The supporting cast includes Elizabeth Shue, Josh Brolin, Kim Dickens, Joey Slotnick, Greg Grunberg, and Mary Randle. All of whom do an exceptional job.
---SPOILERS---
Dr. Sebastian Caine (Kevin Bacon) and his team have discovered the secret to making someone invisible. After animal testings, they move on to human testing. But someone has to be the subject. Volenteering, Caine is turned invisible. But when his team is unable to bring back into visibility, Caine is driven mad by his condition as he seeks his revenge...*end spoilers*
The film has created memorable shock sequences and is destined to become a classic well into the next century. Becoming the basis for a spoof joke in SCARY MOVIE 2, this film grabs you by the throat and never lets go. The first 45 minutes or so are slow, developing the characters and showing how their experiments work. The second half is exciting and appealing to most action and horror fans. Think of DEEP BLUE SEA. Then change the sharks into an crazy invisible man. And then change the water into fire and explosions. A rehashing of a killer shark movie. Interesting... HOLLOW MAN gets 5/5. \n",
+ "pos\t1280\tThis movie is one for the ages. First, I have to say after seeing this once, it became one of my all-time favorite movies. Why? Simple; Ben Coccio (writer, director)has put together a true piece of art. Where 99.9% of movies these days are purely entertainment, director Ben Coccio gives us truth, gives us reality, gives us a learning tool to know why this happened. The mainstream media spins and spins but Ben Coccio looks school shootings right in the face, able to go where no other form of media has EVER gone before, into the minds and hearts of two young men planning to kill their classmates. While it surely is graphic and horrifying, how couldn't it be? The gloves come off, the lies and the sugar coating of our media masters is brushed aside and we are taken to a place where we can find truth in what happened. Sometimes it isn't just a screw loose like everyone likes to think, no, sometimes hatred and isolation are deeper, are more human, we are shown that these boys are us and we them. Society left them behind and the consequences are horrifying and real.
Respect and love your fellow man. A lesson we all should learn, thank you so much for making this film Mr. Coccio, I hope with great anticipation that you will continue your film-making career. \n",
+ "neg\t968\tI am so confused. What in the world was this movie about? What was the killer's motivation? He seemed quite angry, but I have yet to figure out why. Nothing in this movie made sense. It had zero depth. Or less than zero depth. Which I guess would make it a hill. Or a pile. Of crap. The acting was horrible. When I searched for a few of the actors in this movie, they had been in very few things that I had heard of, and that came as absolutely no surprise. I can't decide whether to feel sorry for them for the embarrassment of being in a movie this bad, or to feel that they should never be offered another acting job again. Starting . . . NOW! (Seinfeld reference.) Really, though, don't waste your time with this. There's so little substance that there's nothing there even just to make fun of. This was undoubtedly one of the worst slasher flicks -- NO, one of the worst flicks of ANY KIND, that I have ever had the misfortune to watch, and I've seen quite a few. \n",
+ "neg\t1665\tOh, dear lord.... They've turned what was a fairly thought provoking movie into a swaggering testosterone fest.
The original 1971 version of this movie was beautifully vague about our hero Kowalski. He was a man trying to drive from Denver to San Fransisco to win a bet. Why was he willing to risk his life for the price of a handful of uppers? We're not really sure.
We had a few flashbacks that gave us the picture that he was an adrenaline junkie, and presumably he had led his entire life trying to make it to the vanishing point. That point you see off in the distance where the left and right shoulders of the road come together, and the road itself vanishes. He lives only to be free, and means no ill on anyone. We saw several times when there were accidents he stopped to make sure the other driver was okay before moving on, even the cops that were chasing him.
When he saw the futility of his quest he took his life rather than be arrested and live a life of captivity. He died like he lived, running wide open.
In the remake Kowalski has a whole history (including a first name, even.) He's trying to get to the hospital where his wife is suffering from complications to her pregnancy. He is a devoted husband, and excited expectant father. He comes to the decision to take his life after hearing his wife died in delivery, but they even leave THAT in question when they suggest that he may have jumped out of the car before it ran into the bulldozers. They even gave the part of \"super soul,\" the blind DJ (brilliantly portrayed by Clevon Little in the original) to JASON PRIESTLY?!?!?!?!?!? Give me a break. \n",
+ "pos\t503\tThis is a strong movie from a historical and epic perspective. While the story is simple it is pure and straightforward. In truth, it is the standard story of a simple, honorable man whose honor comes into conflict with the more educated and wealthier men of the period.
Poor vs. Rich, honorable vs. dishonorable, a classic but well-told tale without much of the glitz of hollywood stinking up the screen.
Extra points just because you can almost smell the people on the screen. :) \n",
+ "neg\t353\tWow, a movie about NYC politics seemingly written by someone who has never set foot in NYC. You know there's a problem when at one moment you expect the credits to roll and the movie continues on for another half hour. The characters are boring, John Cusack's accent is laughable, and the plotline teeters between boring and laughable. A horrible movie. \n",
+ "neg\t685\tMy first clue about how bad this was going to be was when the video case said it was from the people who brought us Blair Witch Project which was a masterpiece in comparison to this piece of garbage. The acting was on the caliber of a 6th grade production of Oklahoma and the plot, such as there was, is predictable, boring and inane. 85% of the script is four letter words and innumerable variations on them. Mother F seems to be the \"writer's\" favorite because it is used constantly. It must have taken all of 10 minutes to write this script in some dive at last call. Thank God I rented it and could jump through most of it on fast forward. Don't waste your time or money with this. \n",
+ "neg\t844\t0*'s Christian Slater, Tara Reid, Stephen Dorff, Frank C. Turner, Mathew Walker, Will Sanderson. Directed by Uwe Boll.
Based on the video game director Uwe Boll attempts to recreate the game into an action-packed nail biter sadly he doesn't succeed. Instead he makes one of the worst movies ever MADE! Even though he gets minor celebrity such as Christian Slater, Tara Reid and Stephen Dorff his movie lacks the necessary fundamentals that a movie needs to be good such as a story line, and some basic relativity of what's going on in the dark and the light. The movie bounces all around and Uwe Boll has no creative control. And not to mention the bad CGI used on making the monsters. Even though they did look cool and the feedings were well, a little lame. Honestly this is one of the worst movies ever made. My final rating 0/10. \n",
+ "neg\t821\tSusan Swift is an appealing youngster, a flower child transplanted to the 1980's (like a young Susan Dey), but she doesn't quite have the vocal range for a demanding dramatic lead and she tends to whine; still, she's rather sweet and has bright eyes and a pretty smile. In \"The Coming\" (as it was called when briefly released to theaters), Swift may be the reincarnation of a Salem witch. The flick is very low-budget and borrows from so many other pictures that I gave up on it with about 15 minutes to go. It starts out strong and has some camp appeal. Obviously, there are more serious films that deal with the Salem witch trials that deserve to be seen over this one; however, as junk movies go, it isn't too terrible. The Boston locales are a definite plus, and the supporting cast is amusingly hammy. *1/2 from **** \n",
+ "pos\t730\t\"Against All Flags\" is every bit the classic swashbuckler. It has all the elements the adventure fan could hope for and more for in this one, the damsel in distress is, well, not really in distress. As Spitfire Stevens, Maureen O'Hara is at her athletic best, running her foes through in defiance of the social norms of the period. Anthony Quinn rounds out the top three billed actors as the ruthless Captain Roc Brasiliano and proves to be a wily and capable nemesis for Brian Hawke (Flynn). For the classic adventure fan, \"Against All Flags\" is a must-see. While it may not be in quite the same league as some of Errol Flynn's earlier work (Captain Blood and The Sea Hawk, for instance), it is still a greatly entertaining romp. \n",
+ "\n",
+ "\n",
+ "PyTorchText BuketIterator\n",
+ "\n",
+ "Batch size: 10\n",
+ "\n",
+ "LABEL\tLENGTH\tTEXT\n",
+ "neg\t717\tSix different couples. Six different love stories. Six different love angles. Eighty numbers of audience in the movie theater. Looking at the eighty different parts of the silver screen.
I am sitting in somewhere between them looking at the center of the screen to find out what's going on in the movie. All stories have got no link with each other, but somewhere down the line Nikhil Advani trying to show some relation between them. I tried to find out a few lines I could write as review but at the end of 3 hours 15 minutes found nothing to write. The movie is a poor copy of Hollywood blockbuster LOVE ACTUALLY.
My suggestion. Don't watch the movie if you really want to watch a nice movie. \n",
+ "neg\t718\tMeatball Machine has got to be one of the most complex ridiculous, awful and over-exaggerated sci-fi horror films that I have ever came across. It is about good against evil and a coming-of-age tale, with the aim of to entertain with bloody, sleazy and humorous context. Because of that the violence isn't particularly gruesome and it doesn't make you squirm, but the gratuitous bloodletting and nudity does run freely. The performances by Issei Takahashi and Toru Tezuka is the worst i have seen, if that was not enough it is also directed by an unheard of director called Yudai Yamaguchi. This movie just have it all, it is bad to the bone!, A must see for every b-movie freak!!!... Simply: an enjoying and rare gem. \n",
+ "pos\t718\tWhile I suppose this film could get the rap as being Anti-Vietnam, while watching it I didn't feel that such was the case as much as the film was simply an honest look into the perspective of the young guys being trained for a war that the public didn't support.... it showed their fear, their desperation, their drive... all of it, out in the open, naked. As a soldier myself alot of the themes rang true to me in my experience in the military - especially boot camp. On the whole this movie, although it was shot on a very small budget, looks great, is very well put together, and features excellent acting and directing. I highly recommend this film to anyone looking for another excellent Colin Farrell film. 10/10 \n",
+ "pos\t718\tin one of Neil Simon's best plays. Creaky, cranky ex-Vaudeville stars played by Walter Matthau and George Burns are teaming up for a TV comedy special. The problem is they haven't even SEEN each other in over a decade. Full of zippy one liners and inside showbiz jokes, this story flies along with a steady stream of humor. Good work also by Richard Benjamin as the harried nephew, Rosetta LeNoire as the nurse, and Howard Hesseman as the TV commercial director. Steve Allen and Phyllis Diller appear as themselves. Trivia note: The opening montage contains footage from Hollywood Revue of 1929 and shows Marie Dressler, Bessie Love, Polly Moran, Cliff Edwards, Charles King, Gus Edwards, and the singing Brox Sisters. \n",
+ "neg\t719\tThis show is painful to watch ...
It is obvious that the creators had no clue what to do with this show, from the ever changing \"jobs\", boyfriends, and cast. It appears that they wanted to cast Amanda Bynes in something ... but had no idea what, and came up with this crappy show. They cast her as a teen, surrounded by twenty and thirty somethings, and put her in mostly adult situations at repeatedly failed attempts at comedy. Soon, they realize that she needs a \"clique\" and cast people in their late 20s to try to pass as teenagers.
How this show survived 4 seasons is beyond me. Somehow, ABC has now decided that it is a \"family\" show, and thrown it into it's afternoon lineup on ABC Family. \n",
+ "neg\t720\t***Possible Spoilers***
When I saw this today I had some expectation of how it would be like, not too high, but not low either. This was nothing like I expected at all though, it seems to me like the movie makers couldn't make up their mind of what kind of movie to make.
In the begining of the film it's somewhat mysterious and kinda exciting, but that'll soon change to some ridiculous scenes - very obvious scenes... As I watched further I almost fell asleep a couple of times.
The ending is the most ridiculous of all though, almost splatter/comedy...
I'm not saying it doesn't have some good scenes it's just that the film never becomes \"whole\".
4/10 Movie-Man \n",
+ "pos\t720\tNot only is this a great African-American classic comedy, but one of many great American cult classics.I have recently purchased the collection edition of Rudy Ray Moore.If you love the old school karate movies and black comedies, this is for you! They don't make movies like these anymore. My entire family are movie buffs, so this site is an extreme help on solving many debates. I am deployed in Iraq right now. This helps me to stay connected to world that I know in the states. Thank you IMDb.I recommend this site to all my friends. Dolemite rules! Don't just take my word for it, check them out for yourself. Ten lines is a lot for commenting on one movie I think, but if it gets the point across, I'm all for it! \n",
+ "pos\t720\tthis film needs to be seen. the truest picture of what is going on in the world that I've seen since Darwin's Nightmare. Go see it! and If you're lucky enough to have it open in your city, be sure to see it on the big screen instead of DVD. The writing is sharp and the direction is good enough for the ideas to come through, though hardly perfect. Joan Cusack is amazing, and the rest of the cast is good too. It's inspiring that John Cusack got this movie made, and, I believe, he had to use some of his own money to do it. It's a wild, absurd ride, obviously made without the resources it needed, but still succeeds. Jon Stewart, Steven Colbert, SNL, even Bill Maher haven't shown the guts to say what this film says. \n",
+ "pos\t720\tBell Book and Candle was released in December 1958 and features James Stewart, Kim Novak, Jack Lemmon. and Ernie Kovaks. This film had James Stewart and Kim Novak in their second on-screen pairing (after the Alfred Hitchcock classic Vertigo, released earlier the same year). This was Stewart's last film as a romantic lead as he was deemed too old at age 50 to play that sort of part anymore. The movie is about a witch played by Kim Novak who is attracted to a mortal played by James Stewart. She puts a spell on him and he falls head over heels in love with her. I enjoyed the movie and its cast. This movie at the time was a moderate success which was nominated for a Golden Globe for best Movie Comedy. GimmeClassics \n",
+ "neg\t721\tI bought this movie for 1 euro, not knowing what it was all about. I thought \"hmmm, a movie named mutilation man must be if not very funny at least filled with gore\". It wasn't funny alright. It was disturbing. Very disturbing. And I don't mind disturbing movies but this one just didn't mean anything, except that child abuse is not a good thing to do. hmmm... The quality of the images were terrible. The acting...there was no acting. Just some fcked-up fcker mutilating himself for over 90 minutes. This is probably material for sicko's jurking off on extreme gore.
Don't watch this. It's not worth your time. Its just awful. I wish i never bought this.
They should mutilate the guy who made this \n",
"\n",
"\n"
],
@@ -1054,2563 +3262,95 @@
{
"cell_type": "markdown",
"metadata": {
- "id": "d2QsZ5yN8iDi",
- "colab_type": "text"
+ "id": "d2QsZ5yN8iDi"
},
"source": [
- "# Train Loop Example\n",
+ "### Train Loop Examples\n",
"\n",
- "Now let's print a list of lengths of each sequence in a batch, to see if the **BucketIterator** works as promised.\n",
+ "Now let's look at a model training loop woudl look like. I printed the first 10 batches list of examples lengths to show how nicely they are grouped throughout the dataset!\n",
"\n",
- "We can see how nicely examples of similar length are grouped together by length in a single batch.\n",
- "\n",
- "After each epoch, new batches of similar length are generated when `shuffle=True`.\n",
- "\n",
- "It looks like we have setup everything we need to train a model!"
+ "We see that we get same exact behaviour as we did when using PyTorch Dataset. Now it depends on which way is easier for you to use PyTorchText BucketIterator: with PyTorch Dataset or with PyTorchText TabularDataset"
]
},
{
"cell_type": "code",
"metadata": {
- "id": "mRgTwwHf8j9R",
- "colab_type": "code",
+ "id": "M9WOC2Xe0MQs",
+ "outputId": "37a14a5f-104f-4aef-a889-30f5c78078a1",
"colab": {
- "base_uri": "https://localhost:8080/",
- "height": 1000
- },
- "outputId": "a51bc643-880a-4840-a8d0-5eb141995eda"
+ "base_uri": "https://localhost:8080/"
+ }
},
"source": [
- "# example of number of epochs\n",
+ "# Example of number of epochs.\n",
"epochs = 1\n",
"\n",
- "# loop through each epoch\n",
+ "# Example of loop through each epoch.\n",
"for epoch in range(epochs):\n",
- " # create batches - needs to be called after each epoch\n",
- " train_iterator.create_batches()\n",
"\n",
- " # get each batch\n",
- " for batch in train_iterator.batches:\n",
- " # put all example.text of batch in single array\n",
+ " # Create batches - needs to be called before each loop.\n",
+ " torchtext_train_dataloader.create_batches()\n",
+ "\n",
+ " # Loop through BucketIterator.\n",
+ " for sample_id, batch in enumerate(torchtext_train_dataloader.batches):\n",
+ " # Put all example.text of batch in single array.\n",
" batch_text = [example.text for example in batch]\n",
- " # put all example.label of batch in single array\n",
- " batch_label = [example.label for example in batch]\n",
- " # get maximum sequence length in batch - used for padding\n",
- " max_sequence_len = max([len(text) for text in batch_text])\n",
"\n",
- " # CODE HERE TO FEED BATCHES TO MODEL\n",
- " \n",
- " print([len(text) for text in batch_text])"
+ " print('Batch exmaples lengths: %s'.ljust(20) % str([len(text) for text in batch_text]))\n",
+ "\n",
+ " # Let's break early, you get the idea.\n",
+ " if sample_id == 10:\n",
+ " break"
],
- "execution_count": 11,
+ "execution_count": 76,
"outputs": [
{
"output_type": "stream",
"text": [
- "[200, 200, 200, 201, 201, 203, 203, 203, 203, 203]\n",
- "[127, 127, 127, 127, 127, 127, 128, 128, 128, 128]\n",
- "[213, 214, 214, 214, 216, 216, 216, 216, 217, 217]\n",
- "[238, 238, 239, 240, 240, 241, 241, 241, 241, 241]\n",
- "[197, 197, 198, 198, 198, 198, 199, 199, 200, 200]\n",
- "[354, 355, 355, 357, 359, 360, 360, 360, 360, 361]\n",
- "[251, 252, 256, 257, 258, 259, 260, 260, 260, 260]\n",
- "[148, 148, 148, 148, 148, 148, 149, 149, 149, 149]\n",
- "[122, 122, 122, 122, 123, 123, 123, 123, 123, 123]\n",
- "[223, 223, 226, 227, 227, 228, 228, 229, 229, 229]\n",
- "[89, 89, 90, 91, 92, 93, 94, 94, 94, 94]\n",
- "[131, 131, 132, 132, 132, 132, 132, 133, 133, 133]\n",
- "[133, 133, 133, 133, 134, 134, 134, 134, 135, 135]\n",
- "[188, 188, 189, 189, 190, 190, 190, 192, 192, 192]\n",
- "[174, 175, 175, 175, 176, 176, 176, 176, 177, 177]\n",
- "[51, 52, 52, 52, 53, 53, 54, 54, 56, 56]\n",
- "[168, 169, 169, 169, 170, 170, 170, 170, 170, 171]\n",
- "[278, 278, 278, 279, 280, 281, 281, 281, 282, 283]\n",
- "[451, 453, 454, 454, 458, 464, 466, 467, 468, 468]\n",
- "[402, 403, 403, 404, 405, 407, 409, 410, 411, 411]\n",
- "[5, 22, 28, 29, 34, 34, 34, 36, 36, 40]\n",
- "[109, 109, 109, 110, 110, 110, 110, 110, 110, 111]\n",
- "[192, 193, 193, 193, 193, 194, 195, 196, 197, 197]\n",
- "[111, 111, 111, 112, 112, 112, 112, 112, 112, 113]\n",
- "[431, 433, 436, 436, 437, 440, 441, 442, 443, 449]\n",
- "[554, 555, 556, 561, 569, 570, 573, 573, 578, 580]\n",
- "[171, 171, 172, 172, 172, 173, 174, 174, 174, 174]\n",
- "[144, 144, 144, 144, 145, 145, 145, 146, 146, 146]\n",
- "[791, 794, 795, 801, 825, 843, 872, 930, 936, 957]\n",
- "[70, 70, 71, 72, 72, 72, 72, 72, 73, 73]\n",
- "[293, 293, 295, 295, 296, 297, 297, 298, 299, 299]\n",
- "[165, 165, 165, 165, 165, 166, 166, 166, 166, 167]\n",
- "[146, 147, 147, 147, 148, 148, 148, 148, 148, 148]\n",
- "[63, 64, 64, 65, 65, 65, 66, 67, 67, 69]\n",
- "[218, 218, 218, 218, 219, 219, 219, 220, 220, 222]\n",
- "[285, 286, 286, 286, 287, 287, 287, 288, 290, 290]\n",
- "[523, 525, 531, 533, 538, 539, 541, 542, 545, 548]\n",
- "[361, 361, 362, 362, 363, 366, 366, 367, 369, 369]\n",
- "[587, 591, 593, 601, 601, 602, 606, 606, 611, 618]\n",
- "[392, 392, 393, 394, 395, 395, 397, 400, 401, 402]\n",
- "[161, 161, 161, 162, 162, 162, 163, 163, 163, 164]\n",
- "[57, 58, 58, 59, 59, 60, 61, 62, 62, 63]\n",
- "[113, 113, 113, 113, 113, 113, 114, 114, 114, 114]\n",
- "[119, 119, 119, 119, 119, 120, 120, 120, 120, 120]\n",
- "[156, 156, 156, 156, 157, 157, 157, 157, 158, 158]\n",
- "[490, 491, 498, 506, 507, 510, 511, 515, 517, 522]\n",
- "[101, 101, 101, 102, 102, 102, 102, 102, 103, 103]\n",
- "[230, 230, 231, 231, 232, 232, 233, 233, 233, 234]\n",
- "[129, 129, 129, 130, 130, 130, 130, 130, 130, 131]\n",
- "[126, 126, 126, 126, 127, 127, 127, 127, 127, 127]\n",
- "[121, 121, 121, 121, 121, 121, 121, 121, 122, 122]\n",
- "[181, 181, 181, 181, 182, 182, 182, 183, 183, 183]\n",
- "[99, 99, 99, 100, 100, 100, 100, 100, 101, 101]\n",
- "[78, 78, 78, 78, 80, 80, 80, 81, 82, 82]\n",
- "[334, 334, 335, 335, 336, 338, 338, 339, 340, 341]\n",
- "[177, 178, 178, 179, 179, 179, 179, 180, 180, 180]\n",
- "[308, 308, 308, 310, 311, 312, 313, 313, 313, 314]\n",
- "[48, 48, 48, 50, 50, 50, 50, 50, 50, 51]\n",
- "[103, 104, 104, 104, 105, 106, 106, 106, 106, 106]\n",
- "[209, 209, 209, 210, 210, 211, 211, 212, 212, 212]\n",
- "[267, 267, 267, 270, 270, 271, 273, 273, 274, 275]\n",
- "[659, 660, 663, 674, 677, 686, 687, 693, 695, 701]\n",
- "[142, 142, 142, 143, 143, 143, 143, 143, 144, 144]\n",
- "[149, 149, 149, 150, 150, 150, 150, 151, 151, 151]\n",
- "[140, 140, 140, 141, 141, 142, 142, 142, 142, 142]\n",
- "[84, 84, 85, 85, 85, 86, 87, 88, 88, 88]\n",
- "[325, 327, 328, 328, 329, 331, 332, 332, 332, 333]\n",
- "[411, 413, 414, 414, 417, 420, 423, 423, 426, 427]\n",
- "[261, 261, 262, 264, 264, 265, 265, 266, 266, 266]\n",
- "[40, 42, 42, 42, 43, 45, 46, 47, 47, 47]\n",
- "[73, 74, 75, 75, 75, 76, 76, 77, 77, 77]\n",
- "[291, 291, 291, 291, 292, 292, 292, 292, 293, 293]\n",
- "[203, 204, 204, 204, 204, 205, 205, 205, 205, 206]\n",
- "[206, 206, 207, 207, 208, 208, 209, 209, 209, 209]\n",
- "[342, 342, 343, 343, 343, 344, 350, 351, 352, 354]\n",
- "[300, 301, 303, 303, 303, 304, 305, 305, 305, 307]\n",
- "[128, 128, 128, 128, 128, 128, 128, 128, 129, 129]\n",
- "[138, 139, 139, 139, 139, 140, 140, 140, 140, 140]\n",
- "[123, 124, 124, 124, 124, 125, 125, 125, 125, 125]\n",
- "[373, 375, 378, 380, 380, 381, 382, 384, 389, 390]\n",
- "[242, 243, 243, 244, 244, 244, 244, 245, 245, 246]\n",
- "[167, 167, 167, 168, 168, 168, 168, 168, 168, 168]\n",
- "[314, 314, 315, 315, 316, 317, 319, 320, 322, 323]\n",
- "[135, 135, 135, 135, 135, 136, 136, 136, 136, 137]\n",
- "[469, 471, 471, 473, 477, 479, 481, 484, 485, 485]\n",
- "[184, 184, 184, 184, 184, 185, 185, 185, 185, 186]\n",
- "[137, 137, 137, 137, 137, 137, 138, 138, 138, 138]\n",
- "[247, 247, 247, 248, 248, 248, 249, 251, 251, 251]\n",
- "[959, 962, 975, 979, 989, 999, 999, 1014, 1186, 1192]\n",
- "[234, 234, 235, 235, 236, 236, 236, 236, 237, 237]\n",
- "[107, 107, 107, 107, 108, 108, 108, 108, 108, 109]\n",
- "[116, 116, 117, 117, 117, 117, 117, 117, 117, 117]\n",
- "[702, 709, 715, 750, 755, 764, 765, 771, 773, 789]\n",
- "[159, 159, 159, 159, 160, 160, 160, 160, 160, 161]\n",
- "[619, 623, 625, 628, 629, 633, 644, 650, 653, 659]\n",
- "[187, 187, 187, 187, 187, 187, 188, 188, 188, 188]\n",
- "[114, 115, 115, 115, 115, 116, 116, 116, 116, 116]\n",
- "[95, 95, 95, 96, 96, 96, 97, 97, 97, 98]\n",
- "[154, 154, 154, 154, 154, 154, 155, 155, 155, 155]\n",
- "[152, 152, 152, 153, 153, 153, 153, 153, 153, 153]\n",
- "[143, 143, 143, 144, 144, 144, 144, 144, 144, 144]\n",
- "[182, 182, 182, 182, 182, 183, 184, 184, 184, 185]\n",
- "[223, 223, 223, 223, 223, 224, 225, 225, 225, 226]\n",
- "[256, 256, 257, 258, 258, 259, 259, 260, 261, 265]\n",
- "[57, 58, 58, 59, 59, 59, 59, 60, 60, 60]\n",
- "[501, 502, 504, 504, 504, 508, 512, 514, 517, 519]\n",
- "[272, 274, 275, 275, 276, 276, 277, 278, 278, 279]\n",
- "[101, 102, 103, 103, 103, 104, 104, 104, 105, 105]\n",
- "[279, 280, 280, 280, 280, 282, 282, 283, 286, 287]\n",
- "[755, 789, 789, 791, 809, 815, 819, 829, 834, 855]\n",
- "[165, 165, 166, 166, 166, 166, 166, 166, 167, 167]\n",
- "[356, 357, 357, 358, 358, 359, 360, 361, 361, 361]\n",
- "[865, 881, 890, 898, 908, 961, 962, 972, 990, 1015]\n",
- "[45, 46, 46, 46, 46, 46, 47, 48, 49, 49]\n",
- "[174, 174, 174, 174, 175, 175, 176, 176, 176, 176]\n",
- "[139, 139, 139, 139, 139, 139, 139, 139, 139, 139]\n",
- "[227, 227, 227, 227, 228, 229, 230, 230, 230, 232]\n",
- "[200, 200, 201, 201, 201, 202, 202, 204, 204, 205]\n",
- "[167, 168, 168, 168, 168, 168, 168, 168, 169, 169]\n",
- "[151, 151, 151, 151, 152, 152, 152, 153, 153, 153]\n",
- "[70, 70, 71, 72, 72, 72, 73, 73, 73, 74]\n",
- "[362, 363, 364, 364, 365, 366, 367, 369, 370, 370]\n",
- "[171, 171, 171, 171, 171, 172, 173, 173, 174, 174]\n",
- "[387, 390, 390, 392, 393, 393, 395, 397, 398, 400]\n",
- "[372, 372, 373, 373, 375, 376, 377, 383, 384, 385]\n",
- "[137, 137, 138, 138, 138, 138, 138, 138, 138, 138]\n",
- "[212, 214, 215, 216, 217, 218, 218, 219, 219, 219]\n",
- "[141, 141, 141, 142, 142, 142, 142, 143, 143, 143]\n",
- "[161, 161, 161, 161, 161, 162, 162, 163, 163, 163]\n",
- "[185, 186, 187, 187, 188, 188, 189, 189, 189, 189]\n",
- "[169, 169, 169, 169, 169, 170, 170, 170, 171, 171]\n",
- "[140, 140, 140, 140, 140, 140, 140, 141, 141, 141]\n",
- "[116, 116, 116, 116, 116, 117, 117, 117, 117, 117]\n",
- "[28, 30, 33, 35, 35, 36, 36, 39, 40, 41]\n",
- "[125, 125, 125, 126, 126, 126, 126, 126, 126, 126]\n",
- "[158, 158, 159, 159, 160, 160, 160, 160, 161, 161]\n",
- "[344, 345, 345, 348, 349, 351, 355, 355, 355, 356]\n",
- "[304, 305, 307, 307, 308, 308, 308, 308, 311, 311]\n",
- "[418, 418, 418, 419, 423, 426, 427, 429, 432, 433]\n",
- "[534, 540, 543, 546, 547, 548, 552, 560, 563, 565]\n",
- "[520, 521, 522, 523, 523, 524, 525, 526, 527, 528]\n",
- "[75, 76, 77, 77, 77, 78, 78, 78, 79, 80]\n",
- "[435, 436, 437, 437, 438, 440, 448, 450, 450, 456]\n",
- "[114, 115, 115, 115, 115, 115, 116, 116, 116, 116]\n",
- "[155, 155, 155, 155, 155, 155, 155, 155, 155, 155]\n",
- "[476, 478, 481, 485, 487, 489, 491, 495, 497, 500]\n",
- "[87, 87, 88, 88, 88, 88, 89, 89, 90, 90]\n",
- "[457, 458, 461, 464, 465, 467, 471, 472, 475, 476]\n",
- "[618, 620, 622, 624, 630, 632, 633, 638, 643, 646]\n",
- "[590, 591, 592, 597, 602, 603, 612, 615, 616, 617]\n",
- "[97, 97, 98, 98, 99, 99, 100, 100, 101, 101]\n",
- "[133, 133, 133, 133, 133, 133, 133, 133, 133, 133]\n",
- "[332, 333, 337, 337, 339, 340, 342, 342, 343, 344]\n",
- "[110, 110, 110, 111, 112, 112, 112, 112, 112, 113]\n",
- "[80, 81, 82, 82, 83, 83, 83, 84, 84, 85]\n",
- "[568, 569, 571, 577, 581, 583, 583, 585, 587, 588]\n",
- "[51, 51, 51, 53, 54, 54, 55, 55, 56, 56]\n",
- "[153, 153, 154, 154, 154, 154, 154, 154, 154, 154]\n",
- "[156, 156, 156, 157, 157, 157, 157, 157, 158, 158]\n",
- "[403, 404, 407, 408, 409, 410, 410, 412, 412, 413]\n",
- "[119, 119, 119, 119, 119, 120, 120, 120, 121, 121]\n",
- "[90, 91, 92, 92, 92, 92, 94, 95, 96, 97]\n",
- "[124, 124, 124, 124, 124, 124, 124, 125, 125, 125]\n",
- "[323, 324, 326, 327, 327, 328, 329, 329, 329, 330]\n",
- "[121, 121, 121, 121, 121, 122, 122, 122, 122, 122]\n",
- "[113, 113, 113, 113, 113, 114, 114, 114, 114, 114]\n",
- "[219, 219, 219, 219, 220, 221, 222, 222, 222, 223]\n",
- "[266, 266, 267, 267, 268, 268, 270, 270, 271, 272]\n",
- "[128, 128, 128, 128, 128, 128, 128, 129, 129, 129]\n",
- "[232, 233, 233, 233, 234, 234, 234, 235, 235, 235]\n",
- "[252, 253, 253, 253, 253, 254, 255, 255, 256, 256]\n",
- "[144, 145, 145, 146, 146, 146, 146, 146, 146, 146]\n",
- "[205, 206, 207, 207, 207, 207, 207, 208, 208, 208]\n",
- "[146, 146, 146, 146, 146, 147, 148, 148, 148, 148]\n",
- "[105, 107, 107, 108, 108, 108, 109, 110, 110, 110]\n",
- "[149, 149, 149, 149, 149, 149, 150, 150, 150, 151]\n",
- "[208, 208, 209, 209, 209, 210, 211, 211, 212, 212]\n",
- "[313, 313, 314, 315, 315, 320, 321, 321, 323, 323]\n",
- "[131, 131, 132, 132, 132, 132, 132, 132, 132, 133]\n",
- "[126, 126, 126, 126, 126, 126, 127, 127, 128, 128]\n",
- "[41, 42, 42, 42, 42, 43, 43, 43, 44, 44]\n",
- "[163, 163, 163, 163, 164, 164, 165, 165, 165, 165]\n",
- "[288, 288, 289, 290, 291, 291, 291, 291, 292, 293]\n",
- "[293, 296, 296, 297, 298, 299, 299, 301, 301, 301]\n",
- "[176, 177, 177, 177, 178, 179, 179, 180, 181, 181]\n",
- "[122, 122, 123, 123, 123, 123, 123, 123, 124, 124]\n",
- "[134, 134, 134, 134, 135, 135, 135, 135, 135, 135]\n",
- "[197, 197, 198, 198, 198, 198, 198, 199, 199, 199]\n",
- "[135, 135, 135, 135, 135, 135, 135, 136, 136, 136]\n",
- "[190, 190, 191, 191, 191, 191, 192, 192, 192, 192]\n",
- "[668, 675, 680, 689, 696, 700, 720, 720, 732, 738]\n",
- "[117, 117, 117, 117, 118, 118, 118, 119, 119, 119]\n",
- "[193, 193, 193, 194, 194, 196, 196, 196, 196, 196]\n",
- "[60, 60, 61, 61, 62, 62, 62, 63, 63, 64]\n",
- "[236, 236, 237, 238, 238, 239, 240, 240, 241, 242]\n",
- "[129, 129, 129, 129, 129, 130, 130, 130, 130, 130]\n",
- "[243, 243, 243, 243, 243, 244, 244, 245, 245, 245]\n",
- "[136, 136, 136, 136, 136, 137, 137, 137, 137, 137]\n",
- "[245, 246, 246, 247, 248, 249, 249, 250, 251, 251]\n",
- "[65, 65, 66, 66, 66, 68, 68, 68, 69, 70]\n",
- "[102, 102, 103, 104, 105, 106, 106, 106, 107, 107]\n",
- "[150, 151, 151, 151, 151, 151, 152, 152, 152, 152]\n",
- "[220, 220, 221, 221, 221, 222, 223, 223, 224, 224]\n",
- "[252, 252, 256, 256, 256, 257, 260, 261, 262, 262]\n",
- "[244, 245, 245, 247, 248, 248, 249, 250, 251, 251]\n",
- "[53, 53, 53, 53, 55, 55, 56, 56, 56, 57]\n",
- "[146, 146, 146, 146, 146, 146, 146, 146, 147, 147]\n",
- "[179, 179, 179, 180, 180, 180, 180, 180, 181, 181]\n",
- "[122, 122, 122, 122, 123, 123, 123, 123, 123, 123]\n",
- "[182, 183, 183, 184, 184, 184, 185, 185, 186, 186]\n",
- "[131, 131, 131, 131, 131, 131, 131, 131, 131, 132]\n",
- "[189, 189, 189, 190, 191, 191, 191, 191, 191, 191]\n",
- "[197, 197, 197, 197, 198, 199, 199, 199, 199, 200]\n",
- "[200, 201, 201, 201, 202, 202, 202, 203, 203, 204]\n",
- "[92, 93, 94, 94, 95, 95, 96, 96, 96, 97]\n",
- "[398, 399, 402, 404, 407, 409, 409, 410, 410, 410]\n",
- "[166, 167, 167, 167, 167, 167, 168, 168, 168, 168]\n",
- "[569, 572, 582, 582, 584, 587, 594, 598, 599, 605]\n",
- "[325, 325, 326, 326, 327, 328, 328, 332, 332, 334]\n",
- "[498, 504, 505, 510, 511, 514, 518, 519, 522, 523]\n",
- "[204, 206, 206, 206, 206, 206, 208, 208, 208, 209]\n",
- "[900, 902, 933, 937, 942, 949, 966, 999, 999, 1000]\n",
- "[342, 343, 343, 346, 347, 349, 351, 352, 353, 354]\n",
- "[354, 355, 355, 355, 355, 356, 357, 358, 358, 359]\n",
- "[115, 115, 116, 116, 116, 116, 116, 116, 116, 116]\n",
- "[111, 112, 112, 112, 112, 112, 113, 113, 113, 113]\n",
- "[191, 192, 192, 192, 193, 193, 194, 194, 194, 194]\n",
- "[414, 415, 415, 415, 418, 418, 420, 420, 421, 423]\n",
- "[163, 163, 163, 163, 163, 164, 164, 164, 164, 164]\n",
- "[210, 210, 211, 211, 212, 212, 212, 212, 214, 214]\n",
- "[283, 283, 284, 285, 285, 285, 286, 286, 286, 288]\n",
- "[237, 237, 237, 237, 238, 238, 239, 239, 239, 239]\n",
- "[273, 273, 273, 273, 274, 274, 276, 276, 277, 277]\n",
- "[457, 458, 460, 462, 462, 465, 466, 471, 471, 474]\n",
- "[770, 777, 799, 811, 813, 820, 854, 878, 884, 887]\n",
- "[441, 442, 443, 444, 444, 445, 453, 454, 454, 457]\n",
- "[127, 128, 128, 128, 128, 128, 128, 128, 128, 129]\n",
- "[133, 134, 134, 134, 134, 134, 134, 134, 134, 135]\n",
- "[644, 644, 646, 657, 662, 665, 672, 685, 688, 704]\n",
- "[57, 59, 60, 60, 61, 61, 63, 63, 64, 66]\n",
- "[194, 195, 195, 195, 196, 196, 196, 196, 196, 197]\n",
- "[239, 240, 241, 243, 243, 243, 244, 244, 244, 244]\n",
- "[132, 132, 132, 132, 132, 132, 132, 132, 133, 133]\n",
- "[119, 119, 119, 119, 119, 119, 119, 119, 119, 119]\n",
- "[142, 142, 142, 142, 143, 143, 143, 144, 144, 144]\n",
- "[144, 144, 144, 144, 144, 145, 145, 145, 145, 145]\n",
- "[171, 171, 171, 171, 172, 173, 173, 173, 174, 174]\n",
- "[123, 123, 124, 124, 124, 124, 124, 124, 124, 125]\n",
- "[136, 136, 136, 137, 137, 137, 137, 137, 137, 137]\n",
- "[155, 155, 155, 155, 155, 155, 155, 155, 156, 156]\n",
- "[156, 156, 156, 157, 157, 157, 158, 159, 159, 160]\n",
- "[126, 126, 126, 126, 126, 127, 127, 127, 127, 127]\n",
- "[12, 27, 32, 34, 34, 36, 40, 41, 42, 43]\n",
- "[262, 263, 265, 266, 267, 267, 267, 267, 268, 268]\n",
- "[175, 175, 175, 176, 177, 177, 178, 178, 178, 179]\n",
- "[67, 67, 68, 69, 69, 69, 71, 71, 72, 72]\n",
- "[360, 360, 361, 361, 362, 362, 363, 363, 363, 364]\n",
- "[268, 268, 269, 269, 270, 270, 271, 271, 272, 272]\n",
- "[72, 72, 72, 74, 74, 75, 76, 78, 78, 78]\n",
- "[140, 140, 140, 140, 140, 140, 141, 141, 141, 141]\n",
- "[526, 526, 527, 528, 530, 531, 534, 548, 551, 560]\n",
- "[425, 425, 427, 428, 433, 435, 435, 438, 438, 439]\n",
- "[119, 120, 120, 120, 120, 121, 121, 121, 121, 121]\n",
- "[365, 366, 366, 366, 366, 366, 369, 369, 370, 372]\n",
- "[710, 721, 723, 725, 728, 735, 746, 747, 749, 766]\n",
- "[388, 388, 388, 388, 389, 390, 394, 395, 395, 397]\n",
- "[224, 225, 225, 225, 226, 226, 227, 227, 227, 227]\n",
- "[165, 165, 165, 166, 166, 166, 166, 166, 166, 166]\n",
- "[160, 160, 161, 161, 161, 162, 162, 162, 162, 162]\n",
- "[125, 125, 125, 125, 125, 126, 126, 126, 126, 126]\n",
- "[129, 129, 129, 129, 129, 129, 129, 129, 130, 130]\n",
- "[107, 108, 108, 108, 108, 108, 108, 109, 109, 109]\n",
- "[43, 44, 45, 45, 47, 47, 47, 49, 49, 49]\n",
- "[168, 169, 169, 169, 169, 169, 169, 170, 170, 170]\n",
- "[86, 86, 87, 87, 88, 88, 88, 88, 89, 92]\n",
- "[152, 153, 153, 153, 153, 154, 154, 154, 154, 155]\n",
- "[98, 98, 98, 99, 99, 100, 100, 100, 101, 101]\n",
- "[137, 137, 137, 138, 138, 138, 139, 139, 140, 140]\n",
- "[476, 477, 481, 482, 482, 484, 484, 486, 486, 490]\n",
- "[117, 117, 117, 117, 117, 118, 118, 118, 118, 118]\n",
- "[114, 114, 115, 115, 115, 115, 115, 115, 115, 115]\n",
- "[607, 614, 620, 621, 621, 625, 626, 638, 638, 641]\n",
- "[109, 110, 110, 110, 110, 111, 111, 111, 111, 111]\n",
- "[79, 80, 81, 81, 82, 83, 83, 83, 85, 85]\n",
- "[318, 320, 321, 322, 322, 322, 324, 324, 324, 324]\n",
- "[135, 135, 135, 135, 135, 135, 136, 136, 136, 136]\n",
- "[228, 228, 228, 229, 229, 229, 230, 230, 230, 232]\n",
- "[214, 215, 216, 216, 216, 216, 216, 217, 218, 219]\n",
- "[375, 378, 379, 379, 380, 381, 382, 384, 385, 387]\n",
- "[181, 181, 181, 182, 182, 182, 182, 182, 182, 182]\n",
- "[304, 305, 306, 309, 309, 315, 315, 316, 317, 317]\n",
- "[277, 278, 278, 278, 279, 279, 279, 279, 280, 280]\n",
- "[232, 232, 232, 232, 233, 233, 234, 235, 235, 236]\n",
- "[288, 292, 294, 295, 300, 300, 302, 303, 303, 303]\n",
- "[147, 147, 147, 147, 147, 147, 148, 148, 148, 148]\n",
- "[130, 130, 130, 130, 130, 130, 130, 130, 130, 131]\n",
- "[51, 51, 51, 51, 51, 52, 53, 53, 53, 53]\n",
- "[187, 187, 187, 187, 187, 188, 188, 188, 188, 188]\n",
- "[335, 335, 336, 337, 338, 339, 340, 340, 340, 341]\n",
- "[149, 149, 149, 149, 149, 149, 149, 149, 150, 150]\n",
- "[128, 128, 128, 128, 128, 128, 128, 129, 129, 129]\n",
- "[164, 164, 164, 164, 165, 165, 166, 166, 166, 166]\n",
- "[202, 203, 203, 203, 203, 204, 204, 205, 205, 205]\n",
- "[133, 133, 133, 133, 133, 134, 134, 135, 135, 135]\n",
- "[964, 968, 970, 976, 989, 991, 998, 998, 1003, 1051]\n",
- "[122, 122, 122, 122, 122, 122, 123, 123, 123, 123]\n",
- "[106, 107, 107, 107, 108, 108, 108, 108, 108, 108]\n",
- "[150, 150, 150, 150, 151, 151, 151, 151, 152, 152]\n",
- "[211, 211, 211, 212, 213, 213, 213, 213, 214, 215]\n",
- "[126, 127, 127, 127, 127, 127, 127, 128, 128, 128]\n",
- "[125, 125, 125, 126, 126, 126, 126, 126, 126, 126]\n",
- "[632, 645, 660, 661, 667, 677, 685, 700, 706, 710]\n",
- "[61, 62, 62, 62, 62, 63, 63, 63, 63, 63]\n",
- "[172, 173, 173, 173, 174, 174, 174, 174, 174, 175]\n",
- "[82, 82, 82, 82, 82, 82, 83, 83, 84, 84]\n",
- "[365, 367, 368, 368, 374, 376, 377, 377, 379, 381]\n",
- "[187, 187, 187, 188, 188, 188, 189, 189, 189, 189]\n",
- "[249, 249, 251, 251, 251, 251, 251, 251, 252, 253]\n",
- "[140, 140, 141, 141, 141, 141, 141, 141, 141, 141]\n",
- "[112, 113, 113, 113, 113, 113, 113, 114, 114, 114]\n",
- "[120, 120, 120, 120, 120, 120, 120, 120, 121, 121]\n",
- "[396, 398, 399, 402, 405, 406, 406, 406, 406, 410]\n",
- "[135, 135, 135, 135, 135, 136, 136, 136, 136, 136]\n",
- "[25, 30, 37, 37, 38, 38, 39, 39, 40, 42]\n",
- "[474, 476, 477, 482, 483, 484, 491, 492, 497, 498]\n",
- "[158, 158, 158, 158, 158, 159, 159, 160, 160, 160]\n",
- "[827, 832, 849, 852, 863, 863, 892, 939, 945, 957]\n",
- "[52, 52, 53, 53, 53, 54, 54, 55, 55, 55]\n",
- "[170, 170, 170, 171, 171, 171, 171, 172, 172, 172]\n",
- "[180, 180, 181, 181, 182, 183, 184, 184, 184, 185]\n",
- "[144, 144, 144, 145, 145, 145, 145, 145, 146, 146]\n",
- "[309, 311, 312, 313, 313, 313, 314, 314, 320, 320]\n",
- "[175, 175, 175, 176, 176, 176, 176, 177, 177, 178]\n",
- "[116, 116, 116, 117, 117, 117, 117, 117, 117, 117]\n",
- "[109, 109, 109, 109, 110, 110, 110, 110, 110, 111]\n",
- "[118, 118, 118, 118, 118, 119, 119, 119, 120, 120]\n",
- "[206, 207, 207, 207, 208, 208, 209, 209, 211, 211]\n",
- "[272, 272, 273, 274, 274, 275, 275, 276, 277, 278]\n",
- "[146, 146, 146, 146, 146, 147, 147, 147, 147, 147]\n",
- "[155, 155, 156, 156, 156, 157, 157, 157, 157, 158]\n",
- "[321, 322, 323, 323, 325, 325, 325, 326, 326, 327]\n",
- "[94, 95, 95, 96, 96, 98, 99, 99, 99, 99]\n",
- "[114, 114, 114, 115, 115, 115, 115, 115, 115, 115]\n",
- "[338, 339, 339, 339, 340, 342, 342, 343, 345, 346]\n",
- "[123, 124, 124, 124, 124, 124, 124, 124, 125, 125]\n",
- "[234, 234, 235, 236, 237, 238, 238, 239, 239, 239]\n",
- "[297, 298, 298, 300, 300, 301, 301, 302, 303, 303]\n",
- "[84, 86, 88, 88, 89, 90, 90, 90, 90, 90]\n",
- "[499, 503, 504, 505, 506, 509, 516, 522, 523, 532]\n",
- "[543, 553, 580, 583, 586, 586, 587, 587, 590, 590]\n",
- "[328, 330, 332, 332, 335, 335, 336, 336, 336, 337]\n",
- "[191, 192, 192, 192, 193, 193, 193, 193, 194, 194]\n",
- "[99, 100, 101, 101, 101, 101, 102, 102, 102, 103]\n",
- "[264, 264, 266, 266, 266, 267, 267, 267, 268, 271]\n",
- "[424, 428, 429, 431, 431, 433, 435, 435, 437, 437]\n",
- "[288, 288, 290, 290, 292, 293, 294, 295, 296, 297]\n",
- "[185, 185, 185, 185, 186, 186, 186, 186, 187, 187]\n",
- "[121, 121, 121, 121, 121, 121, 121, 122, 122, 122]\n",
- "[130, 130, 130, 130, 130, 131, 131, 131, 131, 131]\n",
- "[219, 219, 220, 221, 221, 221, 222, 223, 224, 224]\n",
- "[215, 215, 216, 216, 216, 217, 217, 218, 218, 219]\n",
- "[111, 111, 111, 111, 112, 112, 112, 112, 112, 112]\n",
- "[724, 724, 735, 745, 760, 788, 798, 801, 807, 813]\n",
- "[240, 240, 240, 241, 242, 242, 243, 245, 246, 246]\n",
- "[129, 129, 129, 129, 129, 129, 129, 130, 130, 130]\n",
- "[103, 103, 104, 104, 104, 104, 105, 105, 105, 106]\n",
- "[91, 91, 91, 92, 92, 92, 93, 93, 94, 94]\n",
- "[443, 443, 446, 452, 452, 453, 456, 457, 458, 458]\n",
- "[224, 226, 226, 227, 227, 228, 228, 229, 229, 229]\n",
- "[43, 47, 48, 49, 49, 49, 49, 49, 50, 51]\n",
- "[229, 229, 230, 230, 231, 231, 231, 231, 231, 232]\n",
- "[279, 279, 280, 281, 282, 283, 285, 285, 287, 287]\n",
- "[195, 195, 195, 196, 196, 196, 196, 197, 197, 198]\n",
- "[459, 459, 461, 461, 462, 467, 467, 468, 470, 473]\n",
- "[136, 137, 137, 137, 137, 138, 138, 138, 138, 138]\n",
- "[254, 254, 255, 255, 255, 255, 255, 256, 256, 256]\n",
- "[64, 65, 66, 67, 68, 68, 70, 70, 70, 70]\n",
- "[199, 200, 200, 200, 200, 201, 201, 201, 201, 202]\n",
- "[141, 142, 142, 142, 142, 142, 143, 143, 143, 143]\n",
- "[154, 154, 154, 154, 154, 154, 155, 155, 155, 155]\n",
- "[178, 178, 178, 178, 178, 178, 179, 179, 179, 179]\n",
- "[232, 232, 232, 232, 233, 233, 233, 233, 233, 234]\n",
- "[152, 152, 153, 153, 153, 153, 153, 153, 153, 153]\n",
- "[162, 163, 163, 163, 163, 163, 163, 163, 164, 164]\n",
- "[304, 305, 306, 306, 306, 306, 306, 307, 308, 308]\n",
- "[138, 138, 139, 139, 139, 139, 139, 139, 140, 140]\n",
- "[161, 161, 161, 161, 162, 162, 162, 162, 162, 162]\n",
- "[76, 77, 77, 78, 78, 79, 80, 80, 81, 81]\n",
- "[166, 167, 167, 168, 168, 168, 168, 169, 169, 170]\n",
- "[596, 598, 598, 599, 609, 611, 617, 621, 622, 627]\n",
- "[131, 131, 131, 132, 132, 132, 132, 132, 132, 133]\n",
- "[356, 360, 360, 361, 363, 363, 363, 363, 365, 365]\n",
- "[348, 348, 350, 351, 351, 351, 351, 354, 355, 355]\n",
- "[382, 383, 385, 386, 388, 388, 389, 392, 393, 393]\n",
- "[71, 71, 71, 72, 73, 73, 75, 75, 76, 76]\n",
- "[148, 148, 148, 148, 148, 149, 149, 149, 149, 150]\n",
- "[56, 56, 57, 57, 58, 58, 58, 59, 60, 60]\n",
- "[257, 258, 259, 259, 259, 260, 260, 260, 262, 263]\n",
- "[410, 411, 411, 414, 415, 415, 416, 420, 421, 423]\n",
- "[189, 189, 190, 190, 191, 191, 191, 191, 191, 191]\n",
- "[155, 155, 156, 156, 156, 156, 156, 157, 157, 157]\n",
- "[326, 326, 326, 327, 329, 329, 330, 330, 331, 331]\n",
- "[139, 139, 139, 140, 140, 140, 140, 140, 141, 141]\n",
- "[110, 111, 111, 112, 112, 112, 112, 112, 112, 113]\n",
- "[261, 261, 262, 262, 263, 263, 264, 265, 265, 266]\n",
- "[141, 141, 141, 142, 142, 142, 142, 142, 142, 143]\n",
- "[83, 83, 83, 84, 84, 84, 86, 87, 87, 88]\n",
- "[220, 221, 222, 222, 222, 222, 223, 223, 223, 224]\n",
- "[79, 79, 80, 80, 80, 81, 82, 82, 83, 83]\n",
- "[125, 126, 126, 126, 126, 126, 126, 126, 126, 127]\n",
- "[115, 116, 116, 116, 117, 117, 117, 117, 117, 117]\n",
- "[230, 232, 232, 232, 233, 233, 233, 233, 234, 234]\n",
- "[212, 212, 213, 213, 213, 213, 214, 214, 215, 215]\n",
- "[153, 153, 153, 154, 154, 154, 154, 154, 155, 155]\n",
- "[439, 440, 445, 446, 447, 448, 450, 450, 450, 453]\n",
- "[118, 118, 119, 119, 119, 119, 119, 119, 120, 120]\n",
- "[286, 287, 287, 287, 289, 289, 289, 290, 291, 291]\n",
- "[117, 117, 117, 118, 118, 118, 118, 118, 118, 118]\n",
- "[277, 277, 278, 278, 280, 280, 283, 283, 284, 285]\n",
- "[355, 356, 359, 360, 362, 364, 366, 368, 369, 370]\n",
- "[216, 216, 216, 218, 218, 218, 218, 219, 219, 220]\n",
- "[337, 339, 341, 342, 344, 345, 345, 345, 346, 347]\n",
- "[253, 254, 255, 256, 256, 257, 258, 258, 260, 261]\n",
- "[144, 144, 144, 144, 145, 145, 145, 145, 145, 145]\n",
- "[10, 25, 33, 36, 37, 41, 41, 41, 43, 43]\n",
- "[72, 74, 74, 75, 76, 76, 76, 76, 76, 78]\n",
- "[202, 202, 203, 203, 203, 204, 204, 204, 205, 205]\n",
- "[58, 59, 60, 60, 61, 62, 62, 63, 63, 64]\n",
- "[538, 543, 544, 546, 548, 551, 552, 557, 558, 563]\n",
- "[563, 564, 568, 570, 576, 576, 580, 587, 588, 589]\n",
- "[247, 248, 248, 249, 251, 251, 252, 253, 253, 253]\n",
- "[208, 208, 209, 209, 209, 210, 211, 211, 211, 212]\n",
- "[306, 306, 307, 307, 308, 309, 312, 312, 313, 315]\n",
- "[162, 162, 162, 162, 162, 163, 163, 164, 164, 164]\n",
- "[168, 168, 168, 168, 168, 169, 169, 169, 169, 170]\n",
- "[113, 113, 114, 114, 114, 114, 115, 115, 115, 115]\n",
- "[183, 183, 183, 184, 184, 184, 184, 185, 185, 185]\n",
- "[127, 127, 127, 127, 127, 127, 127, 127, 128, 128]\n",
- "[318, 320, 321, 321, 322, 322, 322, 323, 323, 324]\n",
- "[296, 302, 302, 302, 302, 302, 303, 304, 305, 305]\n",
- "[124, 124, 124, 124, 124, 124, 124, 124, 124, 124]\n",
- "[137, 137, 137, 137, 137, 138, 138, 138, 138, 139]\n",
- "[48, 49, 49, 50, 50, 50, 50, 51, 51, 51]\n",
- "[235, 235, 236, 237, 238, 239, 240, 241, 241, 242]\n",
- "[133, 134, 134, 135, 135, 135, 135, 135, 135, 136]\n",
- "[89, 89, 89, 89, 90, 91, 91, 92, 93, 94]\n",
- "[165, 166, 166, 166, 166, 166, 167, 167, 167, 167]\n",
- "[54, 54, 55, 55, 56, 56, 56, 57, 57, 58]\n",
- "[109, 109, 109, 109, 109, 109, 110, 110, 110, 110]\n",
- "[205, 206, 206, 207, 207, 207, 207, 207, 207, 208]\n",
- "[150, 150, 150, 150, 151, 151, 151, 151, 152, 152]\n",
- "[350, 352, 352, 352, 353, 353, 354, 354, 355, 355]\n",
- "[107, 107, 107, 107, 107, 108, 108, 109, 109, 109]\n",
- "[273, 273, 273, 275, 275, 276, 276, 276, 276, 277]\n",
- "[121, 121, 122, 122, 122, 122, 122, 122, 122, 122]\n",
- "[194, 194, 194, 195, 195, 196, 196, 196, 197, 197]\n",
- "[266, 267, 267, 268, 269, 269, 269, 271, 272, 273]\n",
- "[123, 123, 123, 123, 123, 123, 123, 123, 123, 123]\n",
- "[159, 160, 160, 160, 160, 161, 161, 162, 162, 162]\n",
- "[370, 370, 371, 372, 373, 373, 374, 374, 375, 376]\n",
- "[128, 128, 128, 129, 129, 129, 129, 129, 130, 130]\n",
- "[186, 186, 187, 188, 190, 190, 190, 190, 190, 191]\n",
- "[51, 52, 52, 52, 52, 52, 53, 53, 54, 54]\n",
- "[377, 379, 380, 383, 383, 385, 385, 387, 388, 390]\n",
- "[224, 226, 226, 227, 227, 227, 227, 228, 229, 230]\n",
- "[331, 332, 332, 333, 334, 334, 335, 336, 337, 337]\n",
- "[43, 44, 44, 44, 44, 46, 46, 47, 47, 48]\n",
- "[170, 170, 170, 170, 170, 171, 171, 171, 171, 172]\n",
- "[136, 136, 136, 136, 136, 136, 137, 137, 137, 137]\n",
- "[421, 423, 424, 427, 428, 435, 437, 437, 438, 438]\n",
- "[124, 124, 125, 125, 125, 125, 125, 125, 125, 125]\n",
- "[824, 825, 828, 841, 903, 913, 916, 937, 974, 1000]\n",
- "[243, 244, 244, 245, 245, 245, 246, 246, 246, 246]\n",
- "[94, 95, 95, 95, 95, 96, 97, 97, 98, 98]\n",
- "[191, 191, 191, 191, 191, 193, 193, 193, 193, 194]\n",
- "[178, 178, 179, 179, 180, 181, 181, 181, 181, 182]\n",
- "[120, 120, 120, 120, 120, 120, 121, 121, 121, 121]\n",
- "[392, 392, 392, 396, 398, 401, 403, 403, 406, 406]\n",
- "[632, 632, 634, 634, 636, 645, 651, 658, 658, 664]\n",
- "[743, 758, 763, 767, 771, 774, 775, 777, 782, 805]\n",
- "[132, 132, 132, 132, 132, 132, 132, 132, 132, 133]\n",
- "[103, 103, 104, 105, 105, 105, 105, 106, 107, 107]\n",
- "[130, 130, 130, 130, 130, 131, 131, 131, 131, 131]\n",
- "[485, 486, 490, 492, 496, 496, 501, 503, 508, 513]\n",
- "[152, 152, 152, 152, 152, 152, 152, 153, 153, 153]\n",
- "[143, 143, 143, 143, 143, 143, 143, 144, 144, 144]\n",
- "[65, 67, 68, 68, 70, 70, 70, 71, 71, 71]\n",
- "[197, 198, 198, 198, 199, 200, 201, 201, 201, 201]\n",
- "[675, 689, 690, 697, 712, 721, 728, 732, 735, 737]\n",
- "[172, 172, 172, 173, 173, 173, 174, 174, 174, 174]\n",
- "[145, 146, 146, 146, 146, 147, 147, 147, 147, 147]\n",
- "[133, 133, 133, 133, 133, 133, 133, 133, 133, 133]\n",
- "[99, 99, 100, 100, 100, 101, 101, 102, 102, 103]\n",
- "[157, 158, 158, 158, 158, 159, 159, 159, 159, 159]\n",
- "[453, 455, 455, 464, 466, 467, 474, 476, 476, 484]\n",
- "[591, 593, 593, 595, 605, 610, 612, 620, 622, 624]\n",
- "[174, 175, 175, 175, 176, 176, 177, 177, 177, 177]\n",
- "[147, 148, 148, 148, 149, 149, 149, 150, 150, 150]\n",
- "[407, 409, 409, 411, 411, 412, 414, 415, 415, 418]\n",
- "[514, 515, 517, 521, 527, 533, 533, 534, 536, 538]\n",
- "[98, 99, 99, 100, 101, 101, 102, 103, 103, 103]\n",
- "[129, 129, 129, 129, 129, 130, 130, 130, 130, 131]\n",
- "[87, 87, 88, 89, 90, 91, 91, 92, 93, 94]\n",
- "[295, 295, 295, 296, 298, 300, 300, 300, 301, 303]\n",
- "[199, 199, 199, 199, 200, 200, 200, 200, 200, 201]\n",
- "[399, 400, 400, 401, 402, 403, 406, 408, 408, 408]\n",
- "[140, 140, 140, 140, 141, 141, 141, 141, 141, 141]\n",
- "[81, 81, 82, 83, 84, 84, 85, 85, 86, 86]\n",
- "[390, 390, 391, 391, 392, 393, 393, 396, 398, 398]\n",
- "[635, 645, 646, 648, 650, 652, 654, 657, 668, 669]\n",
- "[231, 231, 232, 232, 232, 233, 234, 234, 235, 235]\n",
- "[219, 219, 219, 221, 223, 224, 224, 224, 225, 225]\n",
- "[670, 686, 697, 703, 712, 721, 727, 729, 730, 735]\n",
- "[76, 76, 77, 78, 79, 79, 79, 79, 80, 80]\n",
- "[338, 338, 338, 338, 340, 341, 342, 342, 342, 342]\n",
- "[165, 166, 166, 166, 167, 167, 167, 167, 167, 168]\n",
- "[106, 107, 107, 107, 107, 108, 108, 108, 108, 108]\n",
- "[144, 144, 144, 145, 145, 145, 146, 146, 146, 146]\n",
- "[486, 489, 489, 494, 495, 499, 500, 500, 501, 502]\n",
- "[114, 115, 115, 115, 115, 115, 115, 115, 116, 116]\n",
- "[174, 174, 175, 175, 175, 176, 176, 176, 177, 177]\n",
- "[186, 186, 187, 187, 187, 187, 188, 188, 188, 188]\n",
- "[141, 141, 142, 142, 142, 142, 142, 142, 142, 143]\n",
- "[54, 55, 55, 55, 56, 56, 56, 56, 57, 57]\n",
- "[170, 170, 170, 170, 170, 170, 170, 171, 171, 172]\n",
- "[126, 126, 126, 126, 126, 127, 127, 127, 127, 127]\n",
- "[138, 139, 139, 139, 139, 140, 140, 140, 140, 140]\n",
- "[520, 520, 521, 527, 530, 530, 545, 548, 554, 561]\n",
- "[137, 137, 137, 137, 138, 138, 138, 138, 138, 138]\n",
- "[120, 120, 120, 120, 120, 120, 120, 120, 121, 121]\n",
- "[446, 448, 452, 453, 455, 460, 461, 463, 465, 469]\n",
- "[239, 239, 239, 240, 241, 241, 241, 241, 242, 243]\n",
- "[351, 354, 354, 355, 355, 356, 356, 356, 357, 358]\n",
- "[161, 162, 162, 162, 162, 162, 163, 163, 163, 163]\n",
- "[103, 103, 103, 104, 104, 104, 104, 105, 105, 105]\n",
- "[409, 412, 414, 416, 416, 420, 421, 422, 424, 425]\n",
- "[152, 152, 153, 153, 153, 153, 153, 154, 154, 154]\n",
- "[127, 127, 127, 128, 128, 128, 128, 128, 128, 128]\n",
- "[373, 373, 373, 374, 374, 374, 375, 377, 377, 379]\n",
- "[358, 359, 360, 361, 362, 362, 365, 367, 369, 371]\n",
- "[118, 118, 118, 118, 118, 118, 118, 118, 118, 118]\n",
- "[180, 180, 180, 181, 181, 181, 182, 182, 182, 182]\n",
- "[116, 116, 117, 117, 117, 117, 117, 117, 117, 118]\n",
- "[226, 226, 226, 227, 228, 228, 228, 229, 230, 231]\n",
- "[268, 269, 271, 271, 274, 275, 277, 277, 277, 278]\n",
- "[172, 172, 173, 173, 173, 173, 173, 173, 174, 174]\n",
- "[154, 154, 154, 154, 155, 155, 155, 155, 155, 155]\n",
- "[135, 135, 135, 135, 136, 136, 136, 136, 137, 137]\n",
- "[342, 342, 344, 345, 347, 347, 348, 349, 350, 351]\n",
- "[303, 305, 305, 307, 309, 309, 310, 310, 311, 312]\n",
- "[143, 143, 143, 143, 143, 143, 143, 144, 144, 144]\n",
- "[168, 168, 168, 169, 169, 169, 169, 169, 169, 170]\n",
- "[121, 121, 122, 122, 122, 122, 122, 122, 122, 123]\n",
- "[163, 163, 164, 164, 164, 164, 164, 165, 165, 165]\n",
- "[235, 235, 236, 236, 237, 238, 238, 238, 238, 238]\n",
- "[280, 281, 281, 282, 282, 282, 283, 283, 284, 284]\n",
- "[44, 44, 45, 45, 46, 47, 47, 47, 48, 48]\n",
- "[159, 159, 159, 159, 160, 160, 161, 161, 161, 161]\n",
- "[118, 118, 119, 119, 119, 119, 119, 119, 119, 120]\n",
- "[426, 428, 428, 429, 430, 430, 433, 433, 438, 441]\n",
- "[182, 183, 183, 184, 184, 185, 185, 185, 185, 186]\n",
- "[123, 123, 123, 123, 123, 123, 123, 123, 124, 124]\n",
- "[124, 124, 124, 124, 124, 124, 124, 124, 124, 124]\n",
- "[63, 64, 64, 64, 65, 65, 65, 65, 66, 67]\n",
- "[249, 249, 251, 251, 252, 252, 252, 252, 253, 255]\n",
- "[150, 151, 151, 151, 151, 151, 152, 152, 152, 152]\n",
- "[215, 215, 215, 215, 216, 216, 216, 217, 218, 218]\n",
- "[178, 178, 179, 179, 179, 179, 180, 180, 180, 180]\n",
- "[322, 322, 323, 324, 325, 325, 326, 327, 327, 329]\n",
- "[313, 315, 318, 318, 319, 320, 320, 321, 321, 321]\n",
- "[243, 245, 245, 246, 246, 246, 247, 247, 248, 248]\n",
- "[149, 149, 149, 149, 150, 150, 150, 150, 150, 150]\n",
- "[201, 202, 202, 202, 202, 202, 202, 203, 203, 204]\n",
- "[329, 330, 331, 331, 332, 333, 334, 334, 336, 337]\n",
- "[208, 208, 209, 209, 210, 210, 210, 210, 211, 211]\n",
- "[124, 124, 125, 125, 125, 125, 125, 125, 125, 126]\n",
- "[132, 132, 132, 133, 133, 133, 134, 134, 134, 134]\n",
- "[146, 146, 147, 148, 148, 148, 148, 148, 148, 148]\n",
- "[68, 69, 69, 72, 73, 74, 74, 76, 76, 76]\n",
- "[157, 157, 157, 157, 157, 157, 157, 158, 158, 159]\n",
- "[212, 212, 212, 212, 213, 214, 214, 215, 215, 215]\n",
- "[131, 131, 131, 131, 131, 131, 131, 131, 132, 132]\n",
- "[381, 382, 383, 384, 385, 385, 386, 388, 388, 389]\n",
- "[571, 579, 586, 586, 587, 597, 603, 607, 608, 615]\n",
- "[188, 189, 190, 190, 190, 191, 191, 191, 191, 192]\n",
- "[49, 49, 49, 50, 51, 51, 52, 52, 53, 54]\n",
- "[736, 761, 797, 800, 804, 811, 834, 854, 873, 906]\n",
- "[205, 205, 205, 206, 207, 207, 208, 208, 208, 208]\n",
- "[502, 503, 504, 505, 505, 506, 506, 515, 516, 519]\n",
- "[918, 931, 951, 967, 975, 977, 991, 996, 1148, 1527]\n",
- "[15, 31, 32, 36, 37, 40, 40, 41, 42, 43]\n",
- "[470, 470, 471, 471, 476, 477, 477, 482, 485, 486]\n",
- "[192, 192, 193, 194, 194, 194, 195, 197, 198, 199]\n",
- "[263, 264, 264, 265, 265, 266, 266, 267, 267, 268]\n",
- "[285, 285, 289, 291, 291, 291, 292, 292, 292, 294]\n",
- "[94, 94, 95, 95, 96, 97, 97, 98, 98, 98]\n",
- "[255, 256, 256, 257, 257, 258, 259, 261, 261, 262]\n",
- "[112, 113, 113, 113, 113, 113, 114, 114, 114, 114]\n",
- "[58, 58, 59, 59, 60, 60, 61, 61, 61, 62]\n",
- "[109, 110, 110, 111, 111, 111, 111, 111, 112, 112]\n",
- "[287, 288, 288, 289, 290, 290, 291, 292, 294, 295]\n",
- "[77, 78, 78, 79, 79, 80, 81, 82, 82, 82]\n",
- "[147, 147, 148, 148, 148, 148, 148, 148, 148, 148]\n",
- "[122, 122, 122, 122, 122, 122, 122, 122, 122, 123]\n",
- "[26, 27, 28, 33, 34, 35, 37, 38, 40, 41]\n",
- "[69, 69, 70, 70, 71, 71, 71, 71, 72, 73]\n",
- "[219, 219, 220, 220, 220, 220, 221, 221, 222, 223]\n",
- "[113, 113, 113, 113, 113, 113, 113, 113, 114, 114]\n",
- "[178, 178, 178, 179, 179, 179, 180, 180, 180, 181]\n",
- "[207, 207, 208, 209, 211, 212, 212, 213, 213, 213]\n",
- "[313, 313, 314, 314, 316, 318, 318, 323, 324, 325]\n",
- "[299, 300, 300, 301, 302, 303, 303, 305, 306, 306]\n",
- "[128, 128, 128, 129, 129, 129, 129, 129, 129, 129]\n",
- "[139, 140, 140, 140, 140, 140, 140, 140, 140, 141]\n",
- "[60, 61, 63, 63, 63, 64, 64, 65, 68, 68]\n",
- "[156, 156, 156, 156, 156, 157, 157, 157, 157, 157]\n",
- "[223, 224, 224, 225, 225, 227, 227, 228, 228, 228]\n",
- "[537, 539, 540, 546, 548, 549, 554, 556, 560, 569]\n",
- "[253, 253, 255, 255, 256, 258, 258, 259, 260, 260]\n",
- "[447, 448, 450, 450, 452, 455, 457, 461, 461, 464]\n",
- "[440, 441, 441, 442, 442, 442, 443, 444, 446, 446]\n",
- "[117, 118, 118, 118, 118, 118, 118, 119, 119, 119]\n",
- "[202, 203, 203, 203, 203, 203, 205, 206, 206, 206]\n",
- "[193, 193, 194, 195, 196, 196, 196, 196, 196, 196]\n",
- "[510, 511, 519, 525, 526, 528, 530, 533, 533, 533]\n",
- "[124, 124, 125, 125, 126, 126, 126, 126, 126, 126]\n",
- "[678, 685, 687, 694, 695, 698, 700, 701, 708, 709]\n",
- "[356, 357, 357, 358, 359, 359, 361, 362, 365, 366]\n",
- "[154, 154, 154, 154, 155, 155, 155, 155, 156, 156]\n",
- "[213, 213, 213, 214, 214, 214, 215, 215, 216, 216]\n",
- "[168, 168, 168, 168, 168, 169, 169, 169, 169, 169]\n",
- "[242, 244, 244, 245, 246, 246, 247, 248, 249, 249]\n",
- "[250, 250, 250, 250, 250, 250, 250, 251, 251, 252]\n",
- "[273, 273, 273, 273, 273, 274, 274, 275, 275, 275]\n",
- "[126, 126, 127, 127, 127, 127, 127, 128, 128, 128]\n",
- "[172, 173, 173, 173, 173, 173, 173, 174, 175, 175]\n",
- "[56, 57, 57, 57, 58, 59, 59, 59, 59, 60]\n",
- "[493, 494, 499, 501, 503, 505, 505, 507, 508, 509]\n",
- "[115, 115, 115, 115, 115, 116, 116, 116, 116, 116]\n",
- "[366, 367, 368, 368, 369, 372, 375, 375, 376, 377]\n",
- "[279, 280, 280, 281, 282, 282, 282, 285, 286, 286]\n",
- "[163, 164, 164, 165, 165, 165, 165, 166, 166, 166]\n",
- "[185, 185, 185, 185, 186, 186, 186, 186, 188, 188]\n",
- "[129, 129, 129, 129, 130, 130, 130, 130, 130, 131]\n",
- "[216, 217, 217, 217, 217, 217, 218, 218, 219, 219]\n",
- "[333, 333, 333, 335, 337, 338, 338, 340, 340, 340]\n",
- "[49, 50, 50, 52, 52, 53, 54, 55, 55, 56]\n",
- "[467, 468, 473, 475, 475, 479, 482, 485, 486, 492]\n",
- "[117, 117, 117, 117, 117, 117, 117, 117, 117, 117]\n",
- "[408, 416, 419, 419, 420, 421, 421, 421, 422, 423]\n",
- "[149, 149, 150, 150, 150, 150, 151, 151, 151, 151]\n",
- "[104, 104, 106, 106, 107, 107, 108, 108, 108, 108]\n",
- "[176, 176, 176, 177, 177, 177, 177, 177, 177, 177]\n",
- "[73, 73, 74, 75, 75, 76, 77, 77, 77, 77]\n",
- "[424, 426, 428, 434, 434, 435, 437, 438, 438, 440]\n",
- "[891, 896, 947, 973, 984, 993, 993, 1006, 1076, 2470]\n",
- "[189, 189, 190, 190, 190, 191, 191, 191, 192, 192]\n",
- "[267, 268, 269, 269, 269, 270, 270, 271, 272, 273]\n",
- "[238, 238, 239, 239, 239, 239, 240, 241, 241, 242]\n",
- "[235, 235, 236, 236, 236, 237, 237, 237, 237, 238]\n",
- "[261, 261, 263, 263, 264, 264, 265, 265, 265, 267]\n",
- "[151, 151, 152, 152, 152, 152, 153, 153, 154, 154]\n",
- "[188, 188, 188, 188, 188, 188, 188, 189, 189, 189]\n",
- "[84, 84, 84, 85, 86, 87, 87, 89, 90, 90]\n",
- "[144, 144, 145, 145, 145, 145, 145, 145, 145, 146]\n",
- "[181, 181, 181, 182, 183, 183, 184, 184, 184, 185]\n",
- "[198, 198, 198, 198, 199, 199, 199, 200, 201, 202]\n",
- "[132, 132, 132, 132, 133, 133, 133, 133, 133, 133]\n",
- "[123, 123, 123, 123, 123, 123, 124, 124, 124, 124]\n",
- "[138, 138, 138, 138, 139, 139, 139, 139, 139, 139]\n",
- "[196, 197, 197, 197, 197, 197, 197, 197, 197, 198]\n",
- "[378, 378, 379, 380, 381, 385, 386, 388, 391, 393]\n",
- "[295, 295, 296, 296, 296, 297, 297, 297, 298, 299]\n",
- "[393, 395, 399, 400, 401, 402, 402, 403, 403, 404]\n",
- "[120, 120, 120, 120, 120, 121, 121, 121, 122, 122]\n",
- "[143, 143, 143, 144, 144, 144, 144, 144, 144, 144]\n",
- "[96, 97, 97, 98, 99, 99, 100, 100, 100, 100]\n",
- "[325, 327, 327, 328, 328, 329, 330, 332, 332, 332]\n",
- "[146, 146, 146, 146, 146, 146, 146, 146, 147, 147]\n",
- "[169, 169, 170, 170, 171, 171, 171, 171, 171, 171]\n",
- "[799, 801, 802, 809, 818, 830, 859, 863, 878, 882]\n",
- "[229, 230, 230, 231, 232, 233, 234, 234, 235, 235]\n",
- "[109, 109, 109, 111, 111, 112, 112, 112, 113, 113]\n",
- "[160, 160, 160, 160, 160, 161, 161, 161, 161, 161]\n",
- "[92, 92, 92, 93, 93, 94, 94, 95, 95, 95]\n",
- "[136, 136, 136, 136, 137, 137, 137, 137, 137, 138]\n",
- "[133, 133, 133, 133, 134, 134, 134, 134, 134, 134]\n",
- "[275, 275, 276, 276, 277, 277, 278, 278, 278, 278]\n",
- "[572, 572, 577, 582, 583, 585, 591, 595, 596, 607]\n",
- "[161, 162, 162, 162, 162, 163, 163, 163, 163, 163]\n",
- "[710, 710, 748, 748, 751, 756, 756, 771, 776, 787]\n",
- "[131, 131, 131, 131, 131, 131, 132, 132, 132, 132]\n",
- "[141, 141, 141, 141, 142, 142, 142, 142, 142, 142]\n",
- "[618, 626, 635, 646, 647, 647, 659, 665, 665, 674]\n",
- "[41, 41, 41, 42, 43, 43, 44, 46, 47, 49]\n",
- "[157, 157, 158, 159, 159, 159, 159, 159, 159, 159]\n",
- "[134, 134, 134, 135, 135, 135, 135, 135, 135, 136]\n",
- "[307, 308, 308, 308, 309, 310, 311, 311, 312, 312]\n",
- "[342, 344, 344, 345, 346, 347, 348, 349, 352, 355]\n",
- "[101, 101, 102, 102, 102, 102, 102, 103, 103, 103]\n",
- "[239, 239, 240, 240, 242, 242, 243, 243, 244, 244]\n",
- "[132, 132, 133, 133, 133, 133, 134, 134, 134, 134]\n",
- "[320, 320, 320, 322, 322, 324, 325, 326, 328, 328]\n",
- "[449, 449, 451, 454, 454, 457, 458, 459, 459, 460]\n",
- "[381, 382, 385, 385, 386, 388, 388, 394, 395, 396]\n",
- "[109, 109, 110, 110, 110, 110, 111, 111, 111, 111]\n",
- "[189, 190, 190, 190, 190, 190, 190, 191, 191, 191]\n",
- "[642, 649, 652, 664, 669, 673, 703, 705, 706, 713]\n",
- "[270, 270, 271, 271, 272, 273, 275, 276, 276, 277]\n",
- "[120, 120, 120, 120, 120, 120, 120, 121, 121, 121]\n",
- "[191, 192, 192, 192, 192, 192, 193, 193, 193, 193]\n",
- "[295, 296, 296, 296, 296, 296, 298, 300, 302, 303]\n",
- "[219, 220, 220, 220, 221, 221, 221, 221, 222, 222]\n",
- "[304, 304, 306, 308, 309, 310, 310, 311, 313, 314]\n",
- "[11, 28, 29, 33, 33, 34, 34, 36, 36, 38]\n",
- "[146, 146, 147, 147, 147, 147, 147, 147, 147, 148]\n",
- "[88, 89, 91, 94, 94, 94, 94, 95, 95, 95]\n",
- "[129, 129, 129, 129, 130, 130, 130, 130, 130, 130]\n",
- "[101, 101, 101, 101, 101, 102, 102, 102, 103, 103]\n",
- "[152, 152, 152, 152, 152, 153, 153, 153, 153, 154]\n",
- "[231, 231, 232, 232, 233, 233, 234, 235, 235, 236]\n",
- "[134, 134, 134, 134, 135, 135, 135, 135, 135, 135]\n",
- "[73, 73, 74, 74, 74, 75, 75, 75, 75, 79]\n",
- "[569, 569, 573, 575, 584, 589, 590, 596, 602, 606]\n",
- "[338, 339, 340, 340, 340, 341, 342, 343, 352, 352]\n",
- "[608, 612, 615, 615, 616, 617, 618, 626, 628, 641]\n",
- "[104, 104, 104, 104, 105, 105, 106, 106, 107, 107]\n",
- "[123, 123, 123, 123, 123, 124, 124, 124, 124, 124]\n",
- "[39, 40, 40, 44, 44, 44, 45, 45, 47, 48]\n",
- "[130, 131, 131, 131, 131, 131, 131, 131, 131, 132]\n",
- "[118, 118, 118, 118, 118, 118, 118, 118, 119, 119]\n",
- "[544, 544, 545, 545, 546, 559, 560, 562, 565, 568]\n",
- "[728, 733, 744, 748, 752, 754, 765, 783, 793, 799]\n",
- "[264, 264, 265, 266, 266, 268, 268, 268, 268, 268]\n",
- "[107, 107, 107, 108, 108, 108, 108, 108, 108, 108]\n",
- "[186, 186, 187, 187, 188, 188, 189, 189, 189, 189]\n",
- "[285, 285, 285, 286, 286, 287, 291, 294, 294, 294]\n",
- "[119, 119, 119, 119, 119, 119, 120, 120, 120, 120]\n",
- "[163, 164, 164, 164, 165, 165, 165, 166, 166, 166]\n",
- "[170, 170, 170, 170, 170, 171, 171, 171, 171, 172]\n",
- "[244, 244, 245, 245, 246, 247, 247, 248, 248, 249]\n",
- "[370, 370, 372, 377, 378, 380, 380, 380, 381, 381]\n",
- "[315, 315, 316, 316, 317, 317, 317, 319, 319, 319]\n",
- "[432, 433, 436, 437, 441, 442, 442, 443, 446, 446]\n",
- "[257, 257, 257, 257, 258, 259, 259, 259, 261, 261]\n",
- "[204, 204, 204, 204, 204, 204, 205, 205, 206, 207]\n",
- "[166, 166, 166, 166, 167, 167, 167, 167, 167, 168]\n",
- "[148, 149, 149, 149, 149, 150, 150, 150, 150, 150]\n",
- "[178, 178, 179, 179, 179, 179, 179, 180, 180, 180]\n",
- "[156, 156, 156, 156, 157, 157, 158, 158, 158, 158]\n",
- "[86, 87, 87, 87, 87, 88, 88, 88, 88, 88]\n",
- "[801, 802, 808, 822, 825, 866, 869, 871, 888, 908]\n",
- "[154, 154, 154, 154, 154, 155, 156, 156, 156, 156]\n",
- "[249, 250, 250, 250, 250, 251, 255, 255, 255, 257]\n",
- "[180, 180, 181, 181, 181, 181, 182, 182, 183, 183]\n",
- "[172, 173, 173, 173, 174, 174, 177, 177, 178, 178]\n",
- "[60, 61, 63, 63, 64, 64, 64, 65, 66, 67]\n",
- "[200, 200, 201, 201, 201, 202, 202, 202, 202, 203]\n",
- "[352, 353, 353, 355, 355, 356, 357, 359, 361, 362]\n",
- "[95, 95, 96, 97, 98, 100, 100, 100, 100, 101]\n",
- "[168, 168, 169, 169, 169, 169, 170, 170, 170, 170]\n",
- "[138, 138, 138, 138, 138, 138, 138, 138, 138, 139]\n",
- "[58, 58, 58, 59, 59, 59, 60, 60, 60, 60]\n",
- "[136, 136, 136, 136, 137, 137, 137, 137, 137, 138]\n",
- "[490, 491, 495, 497, 499, 501, 506, 507, 507, 515]\n",
- "[466, 467, 474, 477, 479, 481, 482, 487, 488, 490]\n",
- "[159, 159, 160, 160, 160, 161, 161, 161, 161, 161]\n",
- "[49, 49, 50, 51, 53, 53, 53, 55, 57, 57]\n",
- "[140, 140, 140, 140, 141, 141, 141, 141, 141, 142]\n",
- "[236, 236, 236, 236, 236, 236, 237, 237, 237, 238]\n",
- "[207, 207, 207, 208, 209, 210, 210, 210, 210, 210]\n",
- "[126, 126, 127, 127, 127, 127, 127, 127, 127, 128]\n",
- "[124, 124, 125, 125, 125, 125, 125, 125, 126, 126]\n",
- "[139, 139, 139, 139, 139, 140, 140, 140, 140, 140]\n",
- "[193, 193, 194, 194, 194, 194, 194, 194, 194, 196]\n",
- "[278, 280, 280, 281, 281, 282, 282, 283, 284, 285]\n",
- "[183, 183, 183, 184, 184, 184, 184, 185, 185, 185]\n",
- "[113, 114, 114, 114, 114, 114, 114, 115, 115, 115]\n",
- "[128, 128, 128, 128, 128, 128, 128, 128, 129, 129]\n",
- "[161, 161, 161, 161, 162, 162, 162, 163, 163, 163]\n",
- "[261, 261, 261, 261, 262, 262, 263, 263, 263, 263]\n",
- "[115, 116, 116, 116, 116, 117, 117, 117, 117, 117]\n",
- "[520, 522, 525, 526, 526, 532, 532, 532, 536, 539]\n",
- "[197, 197, 197, 198, 198, 198, 199, 199, 199, 199]\n",
- "[68, 69, 69, 70, 70, 71, 71, 71, 72, 73]\n",
- "[423, 425, 426, 427, 428, 429, 432, 432, 432, 432]\n",
- "[150, 150, 150, 151, 151, 151, 151, 151, 151, 151]\n",
- "[121, 121, 121, 122, 122, 122, 122, 122, 123, 123]\n",
- "[363, 364, 364, 365, 365, 366, 367, 369, 369, 370]\n",
- "[923, 947, 953, 973, 978, 979, 985, 1001, 1066, 1839]\n",
- "[222, 223, 226, 226, 227, 227, 227, 228, 229, 230]\n",
- "[83, 83, 84, 84, 84, 85, 85, 85, 85, 86]\n",
- "[215, 216, 216, 216, 216, 216, 217, 217, 217, 218]\n",
- "[397, 397, 399, 404, 406, 409, 413, 418, 421, 421]\n",
- "[142, 142, 142, 143, 143, 143, 144, 144, 144, 144]\n",
- "[144, 144, 145, 145, 146, 146, 146, 146, 146, 146]\n",
- "[211, 211, 212, 212, 213, 213, 213, 214, 214, 214]\n",
- "[112, 112, 112, 112, 112, 112, 112, 112, 113, 113]\n",
- "[329, 330, 330, 332, 333, 336, 336, 337, 338, 338]\n",
- "[79, 79, 79, 80, 80, 80, 80, 81, 83, 83]\n",
- "[259, 259, 259, 260, 260, 262, 262, 264, 264, 266]\n",
- "[23, 28, 32, 34, 35, 37, 37, 38, 41, 42]\n",
- "[155, 155, 156, 156, 156, 157, 157, 157, 158, 158]\n",
- "[206, 206, 206, 207, 207, 207, 207, 208, 209, 209]\n",
- "[267, 269, 271, 271, 273, 274, 274, 275, 277, 278]\n",
- "[123, 124, 124, 124, 124, 124, 124, 124, 124, 124]\n",
- "[143, 143, 143, 144, 144, 144, 144, 144, 145, 145]\n",
- "[129, 129, 129, 130, 130, 130, 130, 130, 130, 130]\n",
- "[200, 201, 201, 202, 202, 203, 204, 205, 205, 206]\n",
- "[120, 120, 120, 120, 120, 120, 121, 121, 121, 121]\n",
- "[81, 81, 81, 82, 82, 83, 83, 83, 83, 84]\n",
- "[115, 115, 116, 116, 116, 116, 116, 116, 117, 117]\n",
- "[158, 158, 159, 159, 159, 160, 160, 160, 160, 161]\n",
- "[109, 110, 110, 110, 110, 110, 111, 111, 111, 111]\n",
- "[373, 376, 376, 376, 377, 377, 382, 383, 384, 384]\n",
- "[121, 121, 121, 121, 122, 122, 122, 122, 122, 122]\n",
- "[296, 296, 297, 298, 298, 299, 299, 300, 300, 300]\n",
- "[152, 153, 153, 154, 154, 154, 154, 154, 154, 154]\n",
- "[168, 168, 168, 168, 169, 169, 169, 169, 169, 169]\n",
- "[107, 107, 107, 107, 108, 108, 108, 108, 108, 109]\n",
- "[444, 445, 446, 446, 451, 454, 462, 463, 464, 465]\n",
- "[306, 306, 308, 308, 308, 309, 310, 312, 312, 313]\n",
- "[279, 279, 281, 281, 281, 283, 283, 283, 284, 288]\n",
- "[101, 101, 101, 101, 102, 102, 102, 102, 104, 104]\n",
- "[49, 50, 50, 50, 50, 51, 51, 52, 54, 54]\n",
- "[145, 145, 145, 145, 145, 145, 146, 146, 146, 146]\n",
- "[126, 126, 126, 126, 126, 127, 127, 127, 127, 127]\n",
- "[61, 63, 63, 63, 63, 64, 65, 66, 66, 67]\n",
- "[250, 251, 251, 251, 252, 252, 253, 253, 254, 255]\n",
- "[328, 328, 329, 329, 330, 330, 330, 331, 332, 334]\n",
- "[321, 323, 323, 323, 324, 326, 327, 327, 327, 327]\n",
- "[177, 177, 177, 177, 177, 177, 177, 177, 177, 178]\n",
- "[161, 161, 161, 161, 162, 162, 163, 163, 163, 163]\n",
- "[125, 125, 125, 125, 125, 125, 125, 125, 126, 126]\n",
- "[71, 71, 71, 72, 72, 72, 72, 72, 74, 74]\n",
- "[210, 211, 212, 213, 213, 213, 214, 215, 215, 216]\n",
- "[288, 289, 289, 290, 291, 291, 292, 294, 294, 296]\n",
- "[361, 363, 363, 364, 365, 367, 367, 368, 369, 373]\n",
- "[404, 404, 410, 411, 412, 412, 413, 414, 414, 419]\n",
- "[104, 104, 105, 105, 105, 105, 106, 106, 106, 106]\n",
- "[489, 489, 490, 498, 501, 503, 506, 509, 511, 513]\n",
- "[56, 57, 57, 58, 58, 60, 60, 60, 60, 61]\n",
- "[178, 178, 179, 179, 179, 179, 179, 179, 180, 180]\n",
- "[119, 119, 119, 120, 120, 120, 120, 120, 120, 120]\n",
- "[132, 132, 132, 132, 132, 132, 132, 132, 133, 133]\n",
- "[187, 187, 188, 188, 189, 189, 189, 190, 190, 190]\n",
- "[255, 255, 256, 256, 257, 257, 258, 258, 258, 258]\n",
- "[191, 191, 191, 192, 192, 193, 193, 193, 194, 194]\n",
- "[149, 149, 149, 149, 149, 150, 150, 150, 150, 150]\n",
- "[184, 184, 184, 184, 185, 185, 185, 185, 186, 187]\n",
- "[131, 131, 131, 131, 131, 131, 131, 131, 132, 132]\n",
- "[150, 151, 151, 151, 152, 152, 152, 152, 152, 152]\n",
- "[127, 127, 128, 128, 128, 128, 128, 128, 128, 129]\n",
- "[75, 75, 76, 76, 76, 76, 77, 79, 80, 81]\n",
- "[117, 117, 117, 118, 118, 119, 119, 119, 119, 119]\n",
- "[142, 142, 142, 142, 142, 142, 142, 142, 142, 143]\n",
- "[169, 170, 170, 170, 170, 170, 170, 171, 171, 171]\n",
- "[429, 429, 429, 435, 436, 437, 438, 438, 441, 441]\n",
- "[232, 233, 233, 234, 234, 234, 234, 235, 235, 235]\n",
- "[235, 236, 236, 236, 236, 238, 238, 239, 240, 241]\n",
- "[195, 195, 195, 196, 196, 196, 196, 196, 197, 198]\n",
- "[84, 84, 85, 87, 87, 88, 90, 90, 90, 90]\n",
- "[137, 137, 137, 137, 138, 138, 138, 138, 138, 139]\n",
- "[123, 123, 123, 123, 123, 123, 123, 123, 123, 123]\n",
- "[221, 223, 223, 223, 223, 224, 224, 225, 225, 225]\n",
- "[112, 112, 112, 112, 113, 113, 113, 114, 114, 114]\n",
- "[139, 139, 139, 139, 139, 140, 140, 140, 140, 140]\n",
- "[245, 245, 246, 246, 247, 248, 248, 248, 249, 250]\n",
- "[799, 803, 818, 822, 825, 836, 854, 866, 880, 883]\n",
- "[242, 242, 242, 242, 243, 244, 244, 244, 244, 245]\n",
- "[385, 389, 392, 392, 393, 393, 394, 397, 401, 402]\n",
- "[67, 67, 68, 68, 68, 69, 69, 69, 70, 71]\n",
- "[140, 140, 140, 141, 141, 141, 141, 141, 141, 142]\n",
- "[356, 357, 357, 358, 358, 359, 360, 360, 361, 361]\n",
- "[313, 314, 315, 317, 318, 319, 320, 321, 321, 321]\n",
- "[174, 175, 175, 175, 175, 176, 176, 176, 176, 176]\n",
- "[515, 516, 521, 522, 523, 528, 537, 541, 543, 543]\n",
- "[226, 226, 226, 226, 227, 228, 228, 229, 231, 232]\n",
- "[147, 147, 147, 147, 147, 147, 147, 148, 148, 148]\n",
- "[180, 180, 180, 181, 181, 181, 181, 181, 181, 181]\n",
- "[171, 172, 172, 172, 173, 173, 173, 173, 173, 174]\n",
- "[472, 474, 476, 478, 479, 480, 484, 486, 487, 489]\n",
- "[548, 550, 552, 553, 554, 555, 557, 570, 573, 575]\n",
- "[465, 466, 466, 468, 469, 469, 470, 471, 472, 472]\n",
- "[335, 339, 341, 341, 342, 343, 347, 347, 347, 348]\n",
- "[692, 693, 717, 721, 722, 741, 746, 765, 773, 779]\n",
- "[133, 133, 133, 133, 133, 133, 133, 134, 134, 134]\n",
- "[630, 634, 642, 649, 650, 651, 657, 667, 684, 688]\n",
- "[300, 301, 301, 302, 302, 302, 304, 305, 305, 306]\n",
- "[91, 92, 94, 95, 95, 95, 96, 96, 97, 97]\n",
- "[164, 165, 165, 165, 166, 167, 167, 167, 168, 168]\n",
- "[135, 135, 135, 136, 136, 136, 136, 136, 137, 137]\n",
- "[348, 349, 349, 349, 350, 350, 352, 353, 354, 356]\n",
- "[216, 216, 216, 218, 218, 219, 220, 220, 221, 221]\n",
- "[181, 182, 182, 182, 182, 183, 183, 184, 184, 184]\n",
- "[97, 97, 97, 97, 97, 98, 98, 99, 100, 100]\n",
- "[42, 43, 45, 45, 46, 46, 47, 48, 48, 49]\n",
- "[892, 908, 911, 924, 956, 966, 970, 990, 1000, 1376]\n",
- "[582, 584, 592, 598, 603, 606, 611, 619, 621, 626]\n",
- "[198, 198, 198, 198, 199, 199, 199, 199, 199, 200]\n",
- "[131, 131, 132, 132, 132, 132, 133, 133, 133, 133]\n",
- "[130, 130, 130, 131, 131, 131, 131, 131, 131, 131]\n",
- "[121, 122, 122, 122, 122, 122, 122, 122, 122, 122]\n",
- "[426, 427, 429, 430, 431, 431, 433, 436, 436, 439]\n",
- "[197, 198, 199, 199, 199, 199, 199, 199, 200, 200]\n",
- "[219, 219, 220, 220, 220, 220, 221, 221, 221, 222]\n",
- "[468, 479, 480, 481, 485, 493, 495, 500, 506, 506]\n",
- "[123, 123, 123, 123, 123, 123, 123, 123, 124, 124]\n",
- "[237, 237, 238, 239, 240, 241, 241, 241, 242, 242]\n",
- "[173, 173, 173, 173, 173, 173, 173, 173, 175, 175]\n",
- "[190, 190, 191, 191, 192, 193, 193, 193, 193, 194]\n",
- "[358, 359, 360, 360, 366, 367, 370, 370, 372, 372]\n",
- "[128, 129, 129, 129, 129, 129, 129, 130, 130, 130]\n",
- "[137, 137, 137, 137, 137, 137, 138, 138, 138, 138]\n",
- "[154, 154, 154, 154, 154, 155, 155, 155, 155, 156]\n",
- "[119, 119, 119, 119, 119, 119, 120, 120, 120, 120]\n",
- "[243, 243, 243, 244, 244, 245, 245, 245, 246, 247]\n",
- "[162, 162, 162, 162, 162, 163, 163, 164, 164, 164]\n",
- "[233, 234, 235, 235, 236, 236, 236, 236, 236, 236]\n",
- "[152, 152, 153, 153, 153, 153, 153, 153, 154, 154]\n",
- "[204, 204, 205, 206, 206, 206, 207, 207, 208, 209]\n",
- "[88, 88, 89, 89, 89, 90, 90, 91, 91, 91]\n",
- "[267, 267, 268, 268, 269, 269, 269, 270, 271, 272]\n",
- "[104, 105, 105, 106, 106, 106, 106, 106, 106, 107]\n",
- "[138, 139, 139, 139, 139, 140, 140, 140, 140, 140]\n",
- "[96, 96, 97, 99, 99, 100, 100, 100, 100, 101]\n",
- "[145, 145, 146, 146, 146, 146, 147, 147, 147, 147]\n",
- "[170, 170, 171, 171, 172, 172, 172, 173, 173, 173]\n",
- "[60, 61, 61, 61, 61, 61, 62, 63, 63, 63]\n",
- "[321, 323, 323, 323, 324, 324, 325, 325, 328, 329]\n",
- "[156, 156, 157, 157, 157, 157, 157, 157, 157, 157]\n",
- "[857, 909, 939, 974, 996, 997, 1000, 1001, 1002, 1296]\n",
- "[209, 209, 210, 210, 210, 210, 211, 211, 211, 211]\n",
- "[118, 118, 118, 118, 118, 118, 118, 118, 119, 119]\n",
- "[439, 439, 440, 441, 443, 446, 447, 447, 449, 451]\n",
- "[212, 213, 213, 213, 214, 214, 215, 215, 215, 215]\n",
- "[69, 69, 70, 71, 71, 72, 72, 72, 72, 72]\n",
- "[393, 394, 399, 399, 399, 400, 400, 401, 401, 403]\n",
- "[287, 288, 289, 289, 289, 291, 292, 292, 293, 293]\n",
- "[273, 274, 277, 279, 281, 282, 283, 284, 285, 286]\n",
- "[143, 143, 144, 144, 144, 144, 144, 144, 145, 145]\n",
- "[141, 141, 141, 141, 142, 142, 142, 142, 142, 142]\n",
- "[636, 641, 649, 650, 652, 652, 658, 669, 671, 685]\n",
- "[133, 133, 133, 134, 134, 134, 134, 134, 135, 135]\n",
- "[231, 231, 231, 231, 232, 232, 232, 233, 233, 233]\n",
- "[175, 176, 177, 177, 177, 177, 177, 177, 178, 178]\n",
- "[135, 135, 135, 135, 135, 136, 136, 136, 136, 137]\n",
- "[72, 73, 74, 74, 75, 75, 76, 76, 77, 77]\n",
- "[186, 187, 187, 187, 187, 188, 188, 188, 188, 190]\n",
- "[689, 693, 703, 712, 714, 726, 741, 754, 759, 765]\n",
- "[49, 50, 50, 50, 51, 53, 54, 54, 55, 55]\n",
- "[124, 124, 124, 124, 124, 124, 124, 124, 124, 124]\n",
- "[126, 126, 126, 126, 127, 127, 127, 127, 127, 127]\n",
- "[542, 542, 545, 545, 549, 556, 559, 559, 565, 586]\n",
- "[149, 150, 150, 150, 150, 150, 150, 150, 151, 151]\n",
- "[179, 180, 181, 181, 181, 181, 182, 182, 182, 182]\n",
- "[151, 151, 151, 151, 151, 151, 151, 151, 151, 152]\n",
- "[330, 331, 332, 332, 334, 335, 337, 338, 339, 340]\n",
- "[112, 112, 113, 113, 113, 113, 113, 114, 115, 115]\n",
- "[110, 110, 110, 111, 111, 112, 112, 112, 112, 112]\n",
- "[85, 85, 86, 86, 86, 86, 86, 87, 87, 87]\n",
- "[115, 115, 115, 116, 116, 116, 116, 117, 117, 117]\n",
- "[178, 178, 178, 178, 178, 179, 179, 179, 179, 179]\n",
- "[344, 348, 349, 351, 354, 354, 357, 357, 358, 358]\n",
- "[142, 142, 142, 143, 143, 143, 143, 143, 143, 143]\n",
- "[340, 341, 341, 341, 341, 342, 342, 342, 342, 344]\n",
- "[63, 63, 64, 65, 65, 67, 67, 68, 68, 69]\n",
- "[158, 158, 158, 159, 159, 159, 159, 159, 159, 160]\n",
- "[511, 515, 520, 523, 524, 527, 528, 531, 535, 535]\n",
- "[182, 183, 183, 183, 183, 183, 184, 184, 184, 184]\n",
- "[194, 195, 195, 195, 195, 196, 196, 196, 196, 197]\n",
- "[588, 588, 598, 600, 602, 604, 609, 617, 619, 623]\n",
- "[766, 766, 778, 780, 816, 817, 825, 832, 834, 856]\n",
- "[261, 261, 262, 262, 263, 264, 264, 265, 266, 266]\n",
- "[294, 294, 295, 296, 298, 298, 300, 301, 302, 302]\n",
- "[10, 24, 28, 32, 38, 39, 39, 39, 42, 42]\n",
- "[373, 374, 378, 378, 378, 380, 383, 386, 392, 392]\n",
- "[451, 452, 454, 455, 455, 456, 458, 459, 460, 467]\n",
- "[201, 201, 201, 201, 202, 202, 202, 203, 203, 204]\n",
- "[43, 44, 44, 45, 45, 49, 49, 49, 49, 49]\n",
- "[253, 253, 254, 254, 257, 257, 257, 258, 258, 261]\n",
- "[303, 304, 306, 307, 307, 310, 311, 312, 314, 314]\n",
- "[164, 165, 165, 165, 165, 166, 166, 166, 166, 166]\n",
- "[127, 128, 128, 128, 128, 128, 128, 128, 128, 128]\n",
- "[107, 107, 107, 108, 109, 109, 109, 109, 109, 109]\n",
- "[247, 248, 249, 249, 249, 251, 251, 252, 252, 253]\n",
- "[101, 101, 102, 102, 103, 103, 104, 104, 104, 104]\n",
- "[166, 167, 167, 168, 168, 168, 169, 169, 169, 169]\n",
- "[124, 124, 125, 125, 125, 125, 125, 125, 126, 126]\n",
- "[91, 92, 93, 94, 94, 95, 95, 95, 96, 96]\n",
- "[184, 184, 184, 185, 185, 185, 185, 185, 186, 186]\n",
- "[314, 315, 315, 316, 317, 318, 318, 321, 321, 321]\n",
- "[160, 160, 160, 161, 161, 161, 161, 161, 161, 162]\n",
- "[120, 120, 120, 120, 120, 121, 121, 121, 121, 121]\n",
- "[148, 148, 148, 149, 149, 149, 149, 149, 149, 149]\n",
- "[56, 56, 56, 57, 57, 57, 58, 58, 59, 60]\n",
- "[225, 225, 225, 226, 226, 226, 227, 228, 228, 230]\n",
- "[403, 403, 403, 409, 412, 413, 417, 419, 421, 422]\n",
- "[215, 215, 215, 216, 216, 217, 217, 218, 218, 218]\n",
- "[78, 78, 80, 81, 81, 81, 81, 83, 83, 84]\n",
- "[199, 199, 200, 200, 201, 201, 202, 203, 203, 203]\n",
- "[454, 456, 459, 459, 461, 463, 464, 467, 468, 468]\n",
- "[471, 473, 474, 474, 475, 476, 476, 477, 477, 484]\n",
- "[166, 166, 166, 166, 166, 167, 168, 168, 168, 168]\n",
- "[142, 143, 143, 144, 144, 144, 144, 144, 144, 145]\n",
- "[821, 825, 853, 859, 894, 900, 909, 917, 917, 927]\n",
- "[242, 242, 242, 242, 244, 244, 245, 246, 246, 247]\n",
- "[264, 265, 266, 266, 267, 267, 267, 268, 269, 269]\n",
- "[133, 134, 134, 134, 134, 134, 135, 135, 135, 135]\n",
- "[247, 247, 247, 248, 249, 249, 250, 250, 250, 250]\n",
- "[47, 48, 48, 48, 49, 49, 49, 49, 50, 50]\n",
- "[210, 210, 211, 211, 211, 212, 213, 213, 213, 213]\n",
- "[269, 269, 270, 271, 271, 273, 275, 276, 276, 277]\n",
- "[371, 372, 373, 374, 375, 375, 375, 376, 378, 381]\n",
- "[229, 229, 230, 230, 231, 231, 232, 232, 233, 233]\n",
- "[170, 170, 171, 171, 171, 171, 171, 172, 172, 172]\n",
- "[307, 308, 308, 314, 314, 314, 315, 315, 316, 317]\n",
- "[168, 168, 169, 169, 169, 169, 169, 169, 170, 170]\n",
- "[117, 117, 117, 118, 119, 119, 119, 119, 119, 120]\n",
- "[50, 51, 52, 52, 53, 53, 54, 55, 55, 55]\n",
- "[139, 140, 140, 140, 140, 140, 140, 141, 141, 141]\n",
- "[141, 141, 141, 141, 141, 142, 142, 142, 142, 142]\n",
- "[155, 155, 156, 156, 156, 156, 156, 157, 157, 157]\n",
- "[99, 99, 100, 101, 102, 102, 103, 104, 104, 104]\n",
- "[104, 105, 105, 105, 106, 106, 106, 106, 107, 107]\n",
- "[563, 567, 568, 570, 574, 575, 576, 578, 581, 582]\n",
- "[127, 128, 128, 128, 129, 129, 129, 129, 129, 129]\n",
- "[389, 389, 391, 391, 392, 393, 393, 395, 398, 399]\n",
- "[543, 546, 547, 551, 552, 557, 557, 557, 559, 561]\n",
- "[203, 204, 204, 204, 204, 204, 204, 204, 205, 205]\n",
- "[120, 120, 120, 120, 120, 121, 121, 121, 121, 121]\n",
- "[430, 430, 431, 434, 438, 439, 440, 440, 441, 449]\n",
- "[121, 121, 121, 121, 121, 121, 122, 122, 122, 122]\n",
- "[91, 91, 92, 92, 93, 93, 94, 94, 95, 95]\n",
- "[138, 138, 138, 138, 138, 138, 138, 138, 138, 139]\n",
- "[153, 153, 154, 154, 154, 154, 155, 155, 155, 155]\n",
- "[939, 945, 950, 978, 981, 987, 1000, 1000, 1057, 1263]\n",
- "[60, 61, 61, 62, 62, 64, 65, 65, 65, 66]\n",
- "[381, 382, 384, 386, 386, 387, 387, 387, 388, 388]\n",
- "[234, 235, 236, 237, 238, 238, 238, 239, 240, 241]\n",
- "[193, 193, 194, 194, 194, 194, 195, 196, 196, 196]\n",
- "[82, 82, 83, 84, 84, 84, 85, 85, 85, 86]\n",
- "[150, 151, 151, 151, 151, 151, 152, 152, 153, 153]\n",
- "[188, 188, 188, 189, 190, 190, 190, 191, 191, 192]\n",
- "[357, 357, 358, 359, 359, 360, 363, 366, 366, 366]\n",
- "[177, 177, 177, 178, 179, 179, 179, 179, 180, 180]\n",
- "[55, 55, 56, 57, 57, 58, 59, 59, 60, 60]\n",
- "[115, 115, 115, 115, 116, 116, 116, 116, 116, 116]\n",
- "[186, 186, 186, 186, 186, 187, 187, 187, 187, 187]\n",
- "[183, 183, 183, 184, 184, 184, 184, 185, 185, 186]\n",
- "[219, 220, 220, 222, 222, 222, 222, 222, 223, 224]\n",
- "[635, 635, 643, 657, 681, 684, 701, 709, 713, 724]\n",
- "[129, 130, 130, 130, 130, 130, 131, 131, 131, 131]\n",
- "[175, 176, 176, 176, 176, 176, 177, 177, 177, 177]\n",
- "[330, 333, 334, 335, 338, 338, 338, 338, 340, 341]\n",
- "[146, 147, 147, 147, 147, 147, 147, 147, 148, 149]\n",
- "[107, 107, 108, 108, 108, 109, 109, 109, 110, 110]\n",
- "[294, 296, 296, 296, 297, 298, 298, 298, 299, 300]\n",
- "[164, 164, 164, 164, 164, 164, 165, 165, 166, 166]\n",
- "[400, 400, 400, 403, 403, 405, 405, 408, 409, 412]\n",
- "[87, 87, 87, 89, 89, 90, 90, 90, 90, 91]\n",
- "[225, 225, 225, 226, 227, 227, 228, 228, 228, 228]\n",
- "[319, 319, 319, 320, 324, 325, 325, 326, 329, 330]\n",
- "[173, 173, 173, 174, 174, 174, 174, 174, 175, 175]\n",
- "[278, 278, 279, 279, 280, 283, 283, 283, 285, 285]\n",
- "[250, 250, 251, 251, 252, 253, 254, 254, 254, 255]\n",
- "[158, 158, 158, 158, 158, 159, 159, 159, 159, 160]\n",
- "[342, 343, 344, 344, 344, 344, 346, 346, 348, 349]\n",
- "[160, 160, 160, 160, 161, 161, 161, 161, 162, 162]\n",
- "[517, 528, 529, 529, 534, 535, 536, 537, 538, 540]\n",
- "[162, 162, 162, 163, 163, 163, 163, 163, 163, 163]\n",
- "[135, 135, 135, 135, 136, 136, 136, 136, 136, 136]\n",
- "[300, 300, 301, 301, 302, 303, 305, 306, 306, 307]\n",
- "[214, 214, 215, 215, 215, 215, 216, 217, 218, 218]\n",
- "[584, 593, 594, 594, 599, 609, 613, 616, 625, 634]\n",
- "[95, 96, 96, 96, 96, 97, 97, 97, 98, 99]\n",
- "[261, 261, 261, 262, 263, 263, 263, 263, 263, 264]\n",
- "[126, 126, 126, 126, 127, 127, 127, 127, 127, 127]\n",
- "[77, 77, 77, 78, 78, 80, 80, 81, 81, 81]\n",
- "[495, 496, 501, 501, 501, 504, 506, 509, 511, 517]\n",
- "[70, 71, 71, 73, 74, 74, 75, 75, 76, 76]\n",
- "[206, 206, 206, 207, 207, 208, 208, 209, 209, 210]\n",
- "[131, 132, 132, 132, 133, 133, 133, 133, 133, 133]\n",
- "[66, 66, 67, 67, 67, 68, 68, 69, 69, 69]\n",
- "[124, 124, 124, 125, 125, 125, 125, 125, 125, 126]\n",
- "[145, 145, 145, 146, 146, 146, 146, 146, 146, 146]\n",
- "[285, 286, 287, 287, 287, 288, 289, 290, 290, 290]\n",
- "[413, 413, 414, 418, 421, 422, 422, 425, 426, 426]\n",
- "[181, 181, 181, 181, 181, 182, 182, 182, 182, 182]\n",
- "[136, 136, 137, 137, 137, 137, 137, 137, 138, 138]\n",
- "[149, 149, 149, 149, 149, 149, 149, 149, 149, 150]\n",
- "[255, 256, 256, 258, 260, 260, 260, 260, 261, 261]\n",
- "[349, 350, 350, 352, 352, 353, 353, 353, 355, 356]\n",
- "[123, 123, 123, 123, 123, 123, 123, 123, 123, 123]\n",
- "[111, 111, 111, 112, 112, 112, 112, 112, 113, 113]\n",
- "[290, 290, 291, 292, 293, 293, 293, 293, 294, 294]\n",
- "[26, 28, 31, 36, 42, 43, 44, 44, 44, 46]\n",
- "[196, 197, 197, 197, 197, 198, 199, 199, 199, 199]\n",
- "[113, 113, 113, 113, 114, 114, 114, 114, 115, 115]\n",
- "[726, 729, 737, 746, 749, 749, 754, 756, 785, 801]\n",
- "[119, 119, 119, 120, 120, 120, 120, 120, 120, 121]\n",
- "[124, 124, 124, 124, 124, 124, 124, 124, 124, 125]\n",
- "[256, 257, 257, 259, 259, 259, 259, 259, 259, 262]\n",
- "[170, 171, 171, 171, 171, 171, 171, 172, 172, 172]\n",
- "[115, 115, 115, 116, 117, 117, 117, 117, 117, 117]\n",
- "[229, 229, 229, 229, 230, 231, 231, 231, 232, 233]\n",
- "[141, 141, 141, 141, 141, 142, 142, 142, 142, 142]\n",
- "[126, 126, 127, 127, 127, 127, 127, 127, 127, 127]\n",
- "[98, 98, 98, 99, 100, 100, 100, 100, 101, 102]\n",
- "[110, 111, 111, 111, 112, 112, 112, 113, 113, 113]\n",
- "[313, 314, 314, 314, 315, 316, 316, 317, 318, 320]\n",
- "[174, 174, 174, 174, 174, 174, 174, 174, 175, 175]\n",
- "[402, 402, 408, 409, 410, 412, 412, 412, 412, 415]\n",
- "[122, 122, 122, 122, 122, 122, 122, 122, 122, 122]\n",
- "[507, 513, 518, 523, 525, 525, 534, 535, 539, 541]\n",
- "[154, 154, 155, 155, 155, 156, 156, 156, 157, 157]\n",
- "[233, 233, 233, 233, 234, 234, 235, 235, 239, 239]\n",
- "[469, 471, 472, 474, 478, 481, 482, 483, 483, 487]\n",
- "[300, 301, 301, 301, 301, 303, 305, 305, 306, 306]\n",
- "[178, 178, 179, 179, 179, 180, 180, 180, 180, 181]\n",
- "[273, 273, 274, 275, 278, 278, 279, 280, 282, 282]\n",
- "[164, 165, 165, 165, 165, 165, 166, 166, 167, 167]\n",
- "[94, 95, 95, 95, 95, 96, 96, 97, 98, 98]\n",
- "[290, 290, 291, 292, 294, 298, 299, 299, 300, 300]\n",
- "[541, 543, 544, 549, 552, 553, 556, 557, 559, 559]\n",
- "[248, 249, 250, 250, 250, 251, 252, 255, 256, 256]\n",
- "[670, 673, 683, 686, 687, 701, 707, 714, 735, 739]\n",
- "[82, 82, 83, 83, 84, 85, 86, 86, 87, 87]\n",
- "[70, 70, 70, 71, 71, 71, 71, 71, 73, 75]\n",
- "[196, 196, 196, 196, 196, 197, 197, 197, 198, 199]\n",
- "[284, 284, 285, 287, 287, 287, 288, 288, 289, 289]\n",
- "[102, 102, 104, 104, 104, 105, 106, 107, 107, 107]\n",
- "[245, 245, 245, 245, 245, 246, 246, 247, 247, 248]\n",
- "[239, 239, 240, 240, 240, 241, 241, 241, 243, 245]\n",
- "[219, 219, 223, 223, 223, 223, 224, 225, 225, 225]\n",
- "[202, 202, 203, 204, 205, 205, 205, 205, 205, 206]\n",
- "[263, 263, 264, 264, 266, 266, 267, 269, 272, 272]\n",
- "[132, 132, 133, 133, 133, 133, 133, 133, 133, 133]\n",
- "[225, 225, 225, 225, 226, 226, 227, 228, 228, 228]\n",
- "[136, 136, 136, 136, 136, 136, 136, 137, 137, 137]\n",
- "[212, 213, 213, 213, 214, 215, 215, 215, 216, 216]\n",
- "[145, 145, 145, 145, 145, 145, 145, 145, 145, 145]\n",
- "[147, 147, 147, 147, 148, 148, 148, 148, 148, 148]\n",
- "[107, 108, 108, 108, 108, 109, 109, 109, 110, 110]\n",
- "[122, 123, 123, 123, 123, 123, 123, 123, 123, 124]\n",
- "[133, 134, 134, 134, 134, 134, 134, 135, 135, 136]\n",
- "[416, 416, 417, 417, 420, 420, 420, 420, 420, 421]\n",
- "[47, 47, 48, 48, 48, 50, 51, 51, 51, 51]\n",
- "[26, 32, 34, 38, 40, 40, 41, 41, 42, 42]\n",
- "[746, 750, 751, 756, 762, 766, 775, 793, 811, 833]\n",
- "[559, 566, 569, 569, 569, 570, 575, 577, 577, 583]\n",
- "[181, 181, 182, 182, 182, 182, 182, 183, 183, 183]\n",
- "[456, 457, 458, 460, 461, 461, 461, 462, 465, 467]\n",
- "[113, 113, 114, 114, 114, 114, 114, 114, 115, 115]\n",
- "[160, 160, 160, 160, 160, 161, 162, 162, 163, 163]\n",
- "[43, 45, 46, 46, 46, 46, 46, 46, 46, 47]\n",
- "[143, 143, 143, 143, 144, 144, 144, 144, 144, 144]\n",
- "[151, 151, 151, 151, 151, 152, 152, 152, 152, 152]\n",
- "[961, 974, 976, 988, 989, 992, 998, 1001, 1007, 1723]\n",
- "[207, 207, 208, 208, 209, 209, 209, 211, 212, 212]\n",
- "[125, 125, 125, 125, 125, 125, 126, 126, 126, 126]\n",
- "[167, 167, 167, 167, 168, 168, 168, 168, 168, 168]\n",
- "[833, 844, 859, 860, 883, 889, 917, 928, 947, 956]\n",
- "[149, 149, 149, 149, 149, 150, 150, 150, 151, 151]\n",
- "[129, 129, 129, 129, 129, 129, 129, 129, 130, 130]\n",
- "[57, 57, 58, 58, 59, 59, 60, 60, 61, 61]\n",
- "[320, 323, 324, 326, 326, 327, 327, 330, 331, 331]\n",
- "[192, 192, 192, 193, 193, 194, 194, 194, 196, 196]\n",
- "[380, 384, 386, 388, 393, 393, 394, 396, 396, 401]\n",
- "[77, 78, 78, 78, 79, 79, 80, 81, 81, 82]\n",
- "[163, 163, 163, 163, 163, 163, 163, 164, 164, 164]\n",
- "[216, 217, 217, 217, 218, 218, 218, 218, 218, 219]\n",
- "[61, 61, 62, 63, 63, 65, 66, 67, 67, 69]\n",
- "[139, 139, 139, 139, 140, 140, 140, 140, 140, 141]\n",
- "[51, 52, 53, 53, 54, 54, 54, 55, 55, 57]\n",
- "[353, 356, 356, 356, 356, 357, 358, 358, 359, 359]\n",
- "[152, 152, 153, 153, 154, 154, 154, 154, 154, 154]\n",
- "[130, 130, 131, 131, 131, 131, 131, 131, 132, 132]\n",
- "[199, 199, 200, 200, 200, 200, 201, 201, 201, 201]\n",
- "[188, 189, 189, 189, 190, 190, 190, 190, 191, 192]\n",
- "[172, 172, 173, 173, 173, 173, 174, 174, 174, 174]\n",
- "[168, 168, 168, 169, 169, 169, 169, 169, 169, 170]\n",
- "[421, 422, 423, 424, 427, 430, 431, 434, 435, 436]\n",
- "[363, 363, 364, 365, 370, 370, 371, 372, 373, 379]\n",
- "[183, 184, 184, 185, 186, 187, 187, 187, 187, 188]\n",
- "[344, 344, 345, 347, 347, 348, 349, 350, 351, 352]\n",
- "[583, 596, 599, 599, 602, 602, 606, 607, 616, 619]\n",
- "[626, 627, 627, 629, 629, 631, 635, 662, 664, 668]\n",
- "[176, 176, 177, 177, 177, 177, 177, 177, 178, 178]\n",
- "[128, 128, 128, 129, 129, 129, 129, 129, 129, 129]\n",
- "[157, 157, 158, 158, 158, 158, 158, 158, 159, 159]\n",
- "[137, 137, 137, 138, 138, 138, 138, 138, 139, 139]\n",
- "[146, 146, 146, 146, 146, 146, 146, 146, 147, 147]\n",
- "[494, 494, 495, 497, 498, 499, 500, 506, 507, 507]\n",
- "[436, 439, 439, 440, 442, 445, 447, 450, 454, 454]\n",
- "[118, 118, 118, 118, 118, 119, 119, 119, 119, 119]\n",
- "[91, 92, 92, 92, 93, 93, 93, 94, 94, 94]\n",
- "[307, 307, 307, 308, 310, 310, 311, 311, 312, 312]\n",
- "[333, 334, 334, 334, 338, 338, 339, 341, 341, 343]\n",
- "[87, 88, 89, 89, 89, 89, 90, 90, 90, 91]\n",
- "[256, 257, 257, 258, 258, 259, 260, 261, 261, 261]\n",
- "[308, 308, 308, 308, 309, 309, 309, 311, 312, 312]\n",
- "[110, 110, 110, 111, 111, 111, 111, 111, 112, 112]\n",
- "[355, 356, 356, 356, 357, 358, 361, 364, 364, 365]\n",
- "[121, 121, 122, 122, 122, 122, 123, 123, 123, 123]\n",
- "[42, 42, 43, 46, 47, 47, 47, 48, 50, 50]\n",
- "[75, 75, 75, 75, 75, 75, 76, 77, 78, 78]\n",
- "[244, 244, 245, 245, 245, 246, 246, 247, 248, 248]\n",
- "[138, 138, 138, 139, 140, 140, 140, 140, 140, 140]\n",
- "[323, 324, 325, 325, 328, 329, 329, 331, 332, 333]\n",
- "[131, 131, 131, 131, 132, 132, 132, 132, 132, 132]\n",
- "[105, 105, 105, 106, 106, 107, 107, 107, 108, 108]\n",
- "[345, 345, 346, 347, 347, 350, 352, 353, 353, 354]\n",
- "[134, 134, 135, 135, 135, 135, 135, 135, 136, 136]\n",
- "[313, 313, 313, 314, 314, 315, 316, 319, 321, 321]\n",
- "[155, 155, 156, 157, 157, 157, 157, 157, 158, 158]\n",
- "[152, 152, 152, 153, 153, 154, 154, 154, 154, 154]\n",
- "[283, 287, 287, 288, 290, 291, 292, 292, 293, 293]\n",
- "[88, 88, 89, 90, 90, 91, 91, 92, 93, 94]\n",
- "[109, 109, 109, 109, 109, 109, 109, 109, 109, 110]\n",
- "[51, 52, 52, 53, 55, 56, 56, 57, 57, 58]\n",
- "[518, 524, 525, 527, 527, 527, 527, 528, 534, 534]\n",
- "[435, 443, 443, 444, 455, 458, 459, 459, 460, 460]\n",
- "[134, 134, 134, 134, 134, 134, 134, 134, 134, 134]\n",
- "[158, 158, 158, 159, 159, 159, 159, 160, 160, 160]\n",
- "[132, 133, 133, 133, 133, 133, 133, 133, 133, 133]\n",
- "[116, 116, 117, 117, 117, 117, 117, 117, 118, 118]\n",
- "[184, 184, 185, 185, 185, 186, 186, 186, 186, 186]\n",
- "[504, 505, 505, 507, 510, 510, 513, 514, 516, 516]\n",
- "[167, 168, 168, 168, 169, 169, 169, 169, 169, 170]\n",
- "[187, 187, 187, 187, 188, 188, 189, 189, 190, 191]\n",
- "[119, 119, 119, 119, 119, 119, 119, 119, 119, 119]\n",
- "[137, 138, 138, 138, 138, 138, 138, 138, 138, 138]\n",
- "[147, 148, 148, 148, 148, 148, 148, 148, 149, 149]\n",
- "[149, 150, 151, 151, 151, 151, 151, 152, 152, 152]\n",
- "[180, 180, 180, 181, 181, 182, 182, 182, 182, 182]\n",
- "[813, 814, 816, 817, 822, 841, 855, 856, 857, 879]\n",
- "[248, 249, 249, 250, 250, 250, 250, 250, 252, 252]\n",
- "[79, 79, 79, 79, 79, 80, 80, 80, 80, 80]\n",
- "[114, 114, 114, 114, 115, 115, 115, 115, 115, 116]\n",
- "[671, 682, 687, 695, 704, 706, 714, 716, 721, 724]\n",
- "[201, 201, 202, 202, 203, 203, 204, 204, 204, 206]\n",
- "[226, 228, 228, 228, 228, 229, 230, 230, 230, 231]\n",
- "[206, 207, 207, 208, 209, 209, 209, 209, 209, 211]\n",
- "[81, 81, 81, 81, 82, 82, 82, 83, 83, 83]\n",
- "[160, 161, 161, 161, 162, 162, 162, 162, 162, 162]\n",
- "[211, 212, 212, 212, 213, 214, 214, 214, 215, 215]\n",
- "[83, 83, 84, 85, 85, 85, 85, 87, 87, 88]\n",
- "[299, 300, 300, 302, 303, 305, 306, 306, 307, 307]\n",
- "[136, 136, 136, 136, 136, 136, 136, 136, 137, 137]\n",
- "[120, 120, 120, 120, 120, 120, 120, 121, 121, 121]\n",
- "[130, 130, 130, 130, 130, 130, 130, 130, 131, 131]\n",
- "[390, 391, 394, 395, 396, 398, 401, 401, 403, 404]\n",
- "[170, 171, 171, 171, 171, 171, 171, 171, 171, 171]\n",
- "[58, 59, 59, 60, 60, 60, 61, 61, 61, 61]\n",
- "[462, 467, 469, 469, 470, 470, 471, 471, 476, 477]\n",
- "[334, 335, 336, 336, 337, 338, 338, 341, 342, 342]\n",
- "[269, 269, 269, 270, 271, 271, 271, 271, 271, 272]\n",
- "[194, 195, 195, 195, 195, 195, 196, 196, 197, 197]\n",
- "[899, 913, 964, 967, 982, 991, 993, 996, 997, 997]\n",
- "[733, 734, 751, 753, 755, 759, 763, 799, 805, 808]\n",
- "[128, 128, 129, 129, 129, 129, 129, 129, 129, 130]\n",
- "[379, 380, 381, 383, 384, 384, 385, 386, 389, 390]\n",
- "[278, 278, 278, 278, 280, 280, 280, 281, 281, 281]\n",
- "[144, 144, 145, 145, 145, 145, 145, 145, 145, 145]\n",
- "[146, 146, 146, 146, 146, 147, 147, 147, 147, 147]\n",
- "[171, 172, 172, 172, 172, 173, 173, 173, 174, 174]\n",
- "[191, 191, 191, 191, 191, 191, 191, 192, 193, 194]\n",
- "[197, 197, 197, 198, 198, 199, 200, 200, 201, 201]\n",
- "[141, 141, 141, 141, 141, 142, 142, 142, 142, 142]\n",
- "[175, 175, 176, 176, 176, 176, 177, 177, 177, 177]\n",
- "[100, 101, 102, 103, 104, 104, 104, 104, 105, 105]\n",
- "[272, 273, 273, 274, 275, 275, 276, 276, 277, 277]\n",
- "[126, 126, 126, 127, 127, 127, 127, 128, 128, 128]\n",
- "[123, 123, 123, 123, 123, 123, 124, 124, 124, 124]\n",
- "[182, 183, 183, 183, 183, 184, 184, 184, 184, 184]\n",
- "[418, 419, 425, 426, 430, 430, 431, 431, 432, 435]\n",
- "[94, 94, 94, 95, 95, 97, 97, 99, 99, 100]\n",
- "[63, 63, 63, 64, 64, 64, 66, 67, 68, 68]\n",
- "[630, 634, 635, 636, 637, 640, 641, 643, 654, 655]\n",
- "[70, 70, 70, 71, 71, 72, 73, 74, 74, 75]\n",
- "[261, 261, 262, 264, 265, 266, 267, 267, 267, 267]\n",
- "[585, 586, 588, 591, 593, 594, 597, 603, 608, 613]\n",
- "[124, 125, 125, 125, 125, 126, 126, 126, 126, 126]\n",
- "[293, 294, 294, 295, 297, 297, 298, 298, 298, 299]\n",
- "[222, 222, 223, 223, 223, 224, 224, 225, 225, 225]\n",
- "[216, 217, 218, 218, 218, 221, 221, 221, 221, 221]\n",
- "[366, 366, 368, 370, 370, 373, 373, 375, 376, 379]\n",
- "[232, 232, 232, 232, 233, 233, 234, 234, 235, 236]\n",
- "[236, 237, 238, 238, 238, 240, 241, 242, 242, 243]\n",
- "[118, 118, 118, 118, 118, 118, 118, 118, 118, 118]\n",
- "[112, 112, 112, 113, 113, 113, 113, 113, 114, 114]\n",
- "[142, 143, 143, 143, 143, 143, 143, 143, 144, 144]\n",
- "[177, 177, 178, 178, 178, 179, 179, 179, 179, 180]\n",
- "[252, 253, 253, 253, 254, 255, 255, 255, 256, 256]\n",
- "[163, 164, 164, 165, 165, 165, 166, 166, 166, 167]\n",
- "[18, 18, 26, 33, 33, 36, 36, 38, 40, 41]\n",
- "[479, 480, 481, 484, 489, 490, 491, 494, 496, 500]\n",
- "[535, 544, 547, 551, 554, 562, 563, 573, 575, 578]\n",
- "[405, 405, 406, 408, 411, 412, 414, 415, 418, 418]\n",
- "[420, 424, 425, 427, 427, 428, 428, 429, 430, 433]\n",
- "[315, 316, 316, 316, 316, 317, 317, 317, 318, 326]\n",
- "[53, 54, 54, 54, 55, 55, 55, 56, 56, 57]\n",
- "[234, 234, 234, 235, 235, 237, 237, 238, 241, 241]\n",
- "[445, 449, 450, 455, 456, 459, 460, 464, 468, 471]\n",
- "[160, 160, 160, 160, 160, 160, 161, 161, 161, 162]\n",
- "[191, 192, 192, 192, 192, 192, 193, 194, 195, 196]\n",
- "[176, 176, 177, 177, 177, 178, 178, 178, 178, 179]\n",
- "[162, 162, 162, 163, 163, 163, 163, 163, 164, 164]\n",
- "[166, 166, 167, 167, 167, 167, 167, 167, 167, 167]\n",
- "[79, 79, 79, 80, 81, 82, 82, 82, 82, 82]\n",
- "[129, 129, 129, 129, 129, 130, 130, 130, 130, 130]\n",
- "[296, 297, 297, 297, 298, 298, 301, 301, 302, 302]\n",
- "[112, 112, 112, 113, 113, 114, 114, 114, 115, 115]\n",
- "[304, 305, 305, 305, 306, 307, 307, 309, 310, 312]\n",
- "[344, 344, 345, 345, 346, 347, 352, 353, 354, 355]\n",
- "[327, 328, 328, 330, 331, 331, 331, 332, 333, 334]\n",
- "[116, 117, 117, 117, 117, 117, 118, 118, 118, 118]\n",
- "[760, 767, 780, 800, 807, 830, 832, 843, 890, 906]\n",
- "[148, 149, 149, 149, 150, 150, 150, 150, 150, 150]\n",
- "[335, 335, 336, 337, 337, 339, 339, 340, 341, 342]\n",
- "[102, 103, 103, 103, 103, 104, 104, 104, 105, 105]\n",
- "[488, 488, 493, 503, 504, 505, 506, 510, 513, 514]\n",
- "[151, 151, 151, 151, 151, 151, 151, 151, 152, 152]\n",
- "[120, 120, 120, 120, 121, 121, 121, 121, 121, 121]\n",
- "[215, 215, 215, 216, 216, 216, 216, 217, 217, 217]\n",
- "[174, 174, 174, 174, 174, 175, 175, 175, 176, 176]\n",
- "[124, 124, 124, 124, 124, 124, 124, 125, 125, 125]\n",
- "[230, 230, 230, 231, 231, 232, 232, 232, 233, 233]\n",
- "[152, 152, 152, 152, 153, 153, 153, 153, 153, 154]\n",
- "[127, 127, 127, 127, 128, 128, 128, 128, 128, 129]\n",
- "[170, 170, 170, 170, 170, 171, 171, 171, 171, 171]\n",
- "[180, 180, 181, 181, 182, 182, 183, 183, 183, 183]\n",
- "[671, 673, 674, 676, 682, 683, 686, 702, 711, 712]\n",
- "[434, 436, 437, 437, 438, 439, 440, 442, 443, 443]\n",
- "[241, 242, 242, 242, 243, 243, 246, 246, 247, 247]\n",
- "[158, 158, 158, 158, 159, 159, 159, 159, 159, 160]\n",
- "[144, 145, 145, 145, 145, 145, 145, 145, 145, 145]\n",
- "[213, 213, 213, 213, 213, 214, 214, 214, 214, 215]\n",
- "[534, 536, 540, 540, 544, 545, 549, 551, 554, 556]\n",
- "[87, 87, 88, 88, 89, 89, 89, 89, 89, 89]\n",
- "[202, 202, 202, 202, 202, 203, 203, 203, 203, 204]\n",
- "[184, 184, 185, 186, 186, 186, 186, 186, 187, 187]\n",
- "[179, 179, 179, 179, 180, 180, 180, 180, 180, 180]\n",
- "[204, 206, 206, 206, 207, 207, 207, 207, 208, 208]\n",
- "[105, 105, 105, 105, 107, 107, 107, 108, 108, 108]\n",
- "[123, 123, 123, 123, 123, 124, 124, 124, 124, 124]\n",
- "[515, 516, 518, 518, 520, 525, 527, 530, 531, 534]\n",
- "[66, 67, 68, 69, 69, 70, 71, 72, 73, 74]\n",
- "[365, 366, 366, 367, 369, 371, 371, 371, 372, 372]\n",
- "[258, 260, 261, 262, 262, 263, 264, 264, 264, 266]\n",
- "[12, 20, 24, 30, 31, 38, 39, 39, 41, 42]\n",
- "[134, 134, 134, 134, 134, 135, 135, 135, 135, 135]\n",
- "[139, 139, 139, 139, 139, 139, 139, 140, 140, 140]\n",
- "[164, 164, 165, 165, 165, 165, 165, 166, 166, 166]\n",
- "[154, 154, 154, 154, 155, 155, 156, 156, 156, 156]\n",
- "[115, 115, 115, 116, 116, 116, 116, 116, 116, 116]\n",
- "[145, 146, 146, 146, 146, 147, 147, 147, 147, 147]\n",
- "[272, 272, 272, 273, 276, 276, 276, 278, 279, 279]\n",
- "[248, 249, 251, 251, 251, 251, 253, 253, 253, 253]\n",
- "[266, 267, 267, 267, 268, 268, 269, 269, 270, 271]\n",
- "[141, 141, 141, 142, 142, 142, 142, 142, 142, 142]\n",
- "[138, 138, 138, 138, 138, 138, 139, 139, 139, 139]\n",
- "[125, 125, 126, 126, 126, 126, 126, 127, 127, 127]\n",
- "[47, 48, 49, 49, 50, 50, 50, 51, 52, 53]\n",
- "[911, 919, 931, 931, 968, 969, 979, 993, 995, 1000]\n",
- "[118, 119, 119, 119, 119, 119, 119, 120, 120, 120]\n",
- "[108, 108, 109, 109, 109, 110, 110, 110, 110, 111]\n",
- "[187, 187, 187, 187, 188, 189, 190, 190, 191, 191]\n",
- "[222, 223, 225, 226, 227, 227, 229, 229, 229, 229]\n",
- "[400, 401, 401, 403, 410, 412, 412, 416, 416, 419]\n",
- "[389, 393, 394, 394, 395, 397, 398, 399, 399, 399]\n",
- "[84, 84, 85, 85, 85, 86, 86, 86, 86, 87]\n",
- "[156, 157, 157, 157, 157, 157, 157, 158, 158, 158]\n",
- "[356, 356, 357, 358, 359, 359, 359, 360, 363, 365]\n",
- "[147, 147, 147, 148, 148, 148, 148, 148, 148, 148]\n",
- "[42, 43, 43, 44, 44, 44, 45, 46, 46, 47]\n",
- "[96, 97, 98, 98, 99, 99, 100, 101, 101, 101]\n",
- "[121, 121, 122, 122, 122, 122, 122, 122, 123, 123]\n",
- "[74, 75, 76, 76, 76, 76, 77, 77, 78, 78]\n",
- "[142, 143, 143, 143, 144, 144, 144, 144, 144, 144]\n",
- "[200, 200, 200, 200, 201, 201, 201, 201, 201, 202]\n",
- "[287, 288, 290, 290, 292, 292, 293, 293, 293, 293]\n",
- "[372, 373, 373, 375, 377, 377, 378, 380, 385, 388]\n",
- "[171, 171, 172, 172, 173, 173, 173, 173, 173, 173]\n",
- "[561, 562, 563, 563, 569, 569, 576, 576, 577, 582]\n",
- "[111, 111, 111, 112, 112, 112, 112, 112, 112, 112]\n",
- "[254, 255, 255, 255, 255, 256, 256, 258, 258, 258]\n",
- "[209, 210, 210, 210, 210, 211, 211, 211, 212, 212]\n",
- "[167, 168, 169, 169, 169, 169, 169, 169, 169, 170]\n",
- "[135, 136, 136, 136, 136, 137, 137, 137, 137, 137]\n",
- "[196, 197, 197, 198, 198, 198, 199, 199, 199, 199]\n",
- "[280, 280, 280, 280, 282, 282, 283, 283, 284, 284]\n",
- "[90, 91, 92, 92, 92, 92, 93, 94, 95, 95]\n",
- "[57, 60, 60, 62, 64, 64, 65, 66, 66, 66]\n",
- "[130, 131, 131, 131, 131, 131, 131, 131, 132, 132]\n",
- "[601, 606, 608, 613, 615, 636, 642, 651, 656, 658]\n",
- "[132, 132, 132, 132, 132, 133, 133, 133, 133, 133]\n",
- "[218, 219, 219, 219, 219, 219, 219, 221, 222, 222]\n",
- "[471, 474, 475, 476, 479, 480, 481, 481, 482, 484]\n",
- "[266, 267, 267, 268, 268, 270, 272, 273, 273, 273]\n",
- "[247, 247, 248, 248, 249, 250, 250, 250, 251, 252]\n",
- "[149, 149, 149, 149, 149, 150, 150, 150, 150, 150]\n",
- "[199, 199, 200, 200, 200, 201, 201, 201, 201, 202]\n",
- "[110, 111, 111, 111, 111, 111, 112, 112, 112, 112]\n",
- "[93, 93, 94, 95, 95, 96, 96, 97, 97, 97]\n",
- "[410, 413, 417, 420, 421, 422, 422, 424, 426, 427]\n",
- "[223, 224, 225, 225, 226, 227, 227, 227, 228, 228]\n",
- "[72, 72, 74, 74, 75, 76, 76, 76, 77, 77]\n",
- "[30, 36, 37, 39, 42, 42, 42, 43, 43, 43]\n",
- "[535, 540, 541, 544, 550, 552, 555, 557, 558, 559]\n",
- "[43, 45, 45, 46, 46, 47, 47, 49, 49, 50]\n",
- "[384, 387, 387, 392, 395, 396, 398, 398, 399, 403]\n",
- "[150, 150, 151, 151, 151, 151, 151, 152, 152, 152]\n",
- "[162, 163, 163, 163, 163, 164, 164, 164, 164, 164]\n",
- "[197, 197, 197, 198, 198, 198, 198, 199, 199, 199]\n",
- "[164, 164, 164, 164, 165, 165, 165, 166, 166, 166]\n",
- "[315, 315, 316, 317, 317, 318, 320, 320, 321, 322]\n",
- "[877, 898, 899, 919, 953, 960, 979, 986, 1008, 1398]\n",
- "[185, 185, 186, 187, 187, 187, 187, 188, 188, 188]\n",
- "[127, 127, 127, 127, 127, 127, 127, 127, 127, 127]\n",
- "[122, 122, 122, 122, 122, 122, 122, 123, 123, 124]\n",
- "[115, 115, 116, 116, 116, 117, 117, 117, 117, 117]\n",
- "[403, 404, 404, 405, 405, 405, 406, 407, 408, 410]\n",
- "[768, 772, 789, 789, 791, 813, 834, 836, 837, 838]\n",
- "[189, 190, 190, 191, 192, 192, 192, 192, 192, 193]\n",
- "[428, 430, 430, 431, 433, 436, 439, 439, 439, 444]\n",
- "[109, 109, 109, 109, 109, 110, 110, 110, 110, 110]\n",
- "[125, 125, 125, 125, 125, 125, 125, 126, 126, 126]\n",
- "[206, 206, 207, 207, 207, 208, 208, 209, 209, 209]\n",
- "[132, 132, 133, 133, 133, 133, 133, 133, 134, 134]\n",
- "[468, 469, 471, 473, 473, 476, 478, 480, 480, 481]\n",
- "[347, 348, 353, 353, 355, 362, 362, 363, 365, 366]\n",
- "[210, 210, 210, 211, 211, 211, 211, 212, 213, 213]\n",
- "[119, 119, 119, 120, 120, 120, 120, 120, 120, 120]\n",
- "[686, 686, 686, 696, 699, 706, 713, 715, 716, 723]\n",
- "[166, 167, 167, 167, 167, 167, 167, 167, 168, 168]\n",
- "[203, 203, 203, 204, 204, 205, 206, 206, 206, 206]\n",
- "[290, 294, 294, 295, 296, 297, 300, 301, 301, 301]\n",
- "[193, 193, 194, 194, 195, 195, 196, 196, 196, 196]\n",
- "[130, 130, 130, 130, 131, 131, 131, 131, 132, 132]\n",
- "[142, 143, 143, 143, 143, 144, 144, 144, 144, 145]\n",
- "[262, 263, 263, 264, 264, 264, 265, 266, 266, 266]\n",
- "[146, 146, 146, 146, 146, 147, 147, 148, 148, 149]\n",
- "[483, 486, 486, 486, 487, 487, 490, 492, 496, 503]\n",
- "[120, 120, 120, 121, 121, 121, 121, 121, 122, 122]\n",
- "[139, 139, 139, 139, 139, 139, 139, 139, 140, 140]\n",
- "[331, 333, 336, 339, 340, 341, 341, 343, 343, 347]\n",
- "[255, 258, 258, 258, 258, 259, 259, 260, 261, 262]\n",
- "[134, 135, 135, 135, 135, 136, 136, 136, 136, 136]\n",
- "[308, 310, 311, 311, 311, 313, 313, 314, 315, 315]\n",
- "[124, 124, 124, 124, 125, 125, 125, 125, 125, 125]\n",
- "[77, 78, 79, 79, 79, 79, 80, 80, 81, 81]\n",
- "[169, 169, 169, 169, 169, 169, 170, 170, 170, 170]\n",
- "[126, 126, 126, 126, 126, 126, 126, 126, 126, 127]\n",
- "[152, 152, 153, 153, 153, 154, 154, 154, 154, 154]\n",
- "[728, 733, 733, 734, 737, 745, 752, 757, 761, 765]\n",
- "[618, 634, 635, 635, 636, 643, 663, 668, 673, 685]\n",
- "[181, 181, 181, 181, 182, 182, 182, 183, 183, 183]\n",
- "[213, 214, 214, 215, 215, 216, 217, 217, 217, 217]\n",
- "[504, 506, 506, 512, 512, 516, 518, 522, 526, 526]\n",
- "[170, 171, 171, 171, 171, 171, 172, 172, 172, 172]\n",
- "[274, 274, 275, 275, 275, 276, 277, 277, 278, 278]\n",
- "[128, 128, 128, 128, 128, 129, 129, 129, 129, 130]\n",
- "[103, 103, 104, 104, 104, 105, 105, 105, 105, 106]\n",
- "[324, 326, 328, 328, 329, 329, 330, 330, 331, 331]\n",
- "[252, 253, 253, 253, 254, 254, 254, 254, 254, 255]\n",
- "[172, 173, 173, 174, 174, 175, 175, 175, 175, 175]\n",
- "[107, 107, 107, 107, 107, 108, 108, 108, 108, 108]\n",
- "[140, 140, 140, 140, 141, 141, 142, 142, 142, 142]\n",
- "[60, 60, 63, 63, 63, 64, 64, 67, 68, 68]\n",
- "[374, 376, 376, 377, 382, 383, 383, 384, 384, 384]\n",
- "[367, 368, 369, 370, 371, 371, 372, 372, 372, 373]\n",
- "[160, 160, 160, 160, 160, 160, 161, 161, 161, 162]\n",
- "[69, 69, 70, 70, 70, 70, 70, 71, 71, 71]\n",
- "[562, 568, 577, 577, 578, 588, 604, 610, 613, 618]\n",
- "[449, 451, 451, 451, 453, 456, 457, 459, 463, 465]\n",
- "[88, 89, 90, 90, 90, 90, 90, 91, 91, 92]\n",
- "[154, 154, 155, 156, 156, 156, 156, 156, 157, 157]\n",
- "[233, 234, 234, 234, 234, 235, 237, 237, 237, 237]\n",
- "[145, 145, 145, 145, 145, 145, 145, 145, 146, 146]\n",
- "[97, 98, 99, 101, 101, 101, 101, 102, 103, 103]\n",
- "[136, 136, 136, 136, 137, 137, 137, 137, 138, 138]\n",
- "[279, 280, 280, 281, 281, 281, 281, 282, 282, 283]\n",
- "[238, 238, 239, 239, 239, 239, 239, 240, 240, 241]\n",
- "[183, 183, 183, 183, 184, 184, 184, 185, 185, 185]\n",
- "[177, 178, 178, 178, 178, 178, 179, 180, 180, 181]\n",
- "[302, 303, 304, 304, 305, 305, 306, 306, 307, 307]\n",
- "[158, 158, 158, 158, 158, 158, 158, 159, 159, 160]\n",
- "[241, 242, 243, 243, 243, 244, 246, 246, 246, 246]\n",
- "[117, 118, 118, 118, 118, 118, 119, 119, 119, 119]\n",
- "[283, 284, 285, 286, 287, 287, 288, 288, 289, 289]\n",
- "[229, 229, 229, 229, 229, 230, 230, 231, 232, 232]\n",
- "[51, 51, 52, 52, 54, 55, 55, 55, 56, 57]\n",
- "[138, 138, 138, 138, 138, 138, 138, 138, 138, 138]\n",
- "[218, 219, 219, 220, 220, 221, 221, 221, 223, 223]\n",
- "[176, 176, 176, 176, 176, 177, 177, 177, 177, 177]\n",
- "[82, 83, 83, 83, 83, 84, 85, 86, 86, 88]\n",
- "[57, 57, 58, 58, 58, 58, 59, 59, 59, 60]\n",
- "[113, 113, 113, 114, 114, 114, 114, 114, 114, 115]\n",
- "[654, 662, 672, 674, 675, 680, 688, 695, 701, 703]\n",
- "[17, 22, 25, 33, 34, 35, 36, 37, 38, 40]\n",
- "[436, 444, 444, 444, 445, 450, 456, 458, 461, 462]\n",
- "[41, 41, 41, 42, 42, 43, 44, 44, 45, 45]\n",
- "[167, 167, 167, 168, 168, 168, 168, 168, 168, 169]\n",
- "[151, 151, 151, 151, 151, 151, 151, 151, 152, 152]\n",
- "[352, 354, 355, 357, 360, 361, 362, 367, 367, 369]\n",
- "[370, 371, 371, 374, 374, 376, 376, 377, 384, 385]\n",
- "[221, 221, 221, 221, 222, 222, 222, 223, 223, 223]\n",
- "[52, 53, 54, 54, 54, 54, 54, 55, 55, 55]\n",
- "[115, 115, 115, 116, 116, 116, 116, 116, 117, 117]\n",
- "[140, 141, 141, 141, 142, 142, 142, 142, 142, 143]\n",
- "[176, 177, 177, 177, 177, 178, 178, 179, 179, 179]\n",
- "[193, 193, 194, 194, 194, 194, 195, 195, 195, 195]\n",
- "[138, 138, 138, 138, 138, 138, 138, 138, 138, 139]\n",
- "[409, 410, 410, 412, 413, 413, 414, 418, 420, 423]\n",
- "[128, 128, 128, 128, 128, 128, 128, 128, 128, 128]\n",
- "[117, 117, 117, 118, 118, 118, 118, 118, 118, 118]\n",
- "[92, 92, 93, 96, 96, 96, 97, 98, 99, 99]\n",
- "[241, 242, 242, 243, 243, 243, 243, 243, 243, 243]\n",
- "[317, 318, 319, 319, 320, 320, 322, 323, 323, 325]\n",
- "[183, 183, 184, 184, 184, 184, 186, 186, 187, 187]\n",
- "[276, 277, 278, 278, 279, 279, 280, 281, 282, 282]\n",
- "[113, 113, 114, 114, 114, 114, 115, 115, 115, 115]\n",
- "[132, 132, 132, 132, 132, 133, 133, 133, 133, 133]\n",
- "[334, 337, 338, 340, 341, 342, 344, 345, 346, 346]\n",
- "[607, 610, 616, 625, 626, 641, 648, 649, 651, 653]\n",
- "[295, 296, 296, 297, 297, 299, 302, 306, 306, 306]\n",
- "[566, 567, 569, 572, 581, 582, 589, 596, 602, 605]\n",
- "[102, 102, 103, 103, 103, 103, 103, 103, 103, 103]\n",
- "[169, 169, 169, 170, 170, 170, 170, 170, 170, 170]\n",
- "[88, 89, 89, 90, 90, 91, 91, 91, 91, 92]\n",
- "[75, 75, 75, 76, 77, 77, 78, 79, 79, 81]\n",
- "[139, 139, 139, 139, 139, 139, 140, 140, 140, 140]\n",
- "[146, 146, 147, 147, 147, 147, 148, 148, 148, 148]\n",
- "[133, 134, 134, 134, 134, 134, 135, 135, 135, 135]\n",
- "[130, 130, 130, 130, 130, 130, 130, 130, 130, 131]\n",
- "[119, 119, 119, 119, 119, 119, 120, 120, 120, 120]\n",
- "[122, 122, 122, 122, 123, 123, 123, 123, 123, 123]\n",
- "[484, 485, 486, 491, 492, 494, 495, 500, 500, 507]\n",
- "[233, 233, 234, 234, 234, 234, 235, 235, 236, 236]\n",
- "[171, 171, 171, 172, 172, 172, 172, 172, 173, 173]\n",
- "[213, 214, 215, 215, 215, 215, 215, 216, 217, 217]\n",
- "[266, 267, 268, 268, 268, 268, 269, 270, 272, 275]\n",
- "[125, 125, 125, 125, 125, 126, 126, 126, 126, 126]\n",
- "[179, 179, 180, 181, 181, 182, 182, 182, 182, 183]\n",
- "[228, 228, 228, 229, 229, 229, 230, 230, 231, 232]\n",
- "[967, 976, 989, 992, 992, 993, 993, 998, 1001, 1013]\n",
- "[290, 291, 291, 292, 292, 292, 292, 292, 295, 295]\n",
- "[56, 58, 59, 59, 59, 60, 60, 60, 60, 61]\n",
- "[326, 328, 328, 329, 330, 333, 333, 333, 334, 334]\n",
- "[70, 70, 70, 70, 71, 72, 72, 72, 72, 74]\n",
- "[158, 158, 158, 159, 159, 159, 160, 160, 160, 161]\n",
- "[135, 135, 135, 135, 135, 135, 136, 136, 136, 136]\n",
- "[831, 837, 848, 861, 863, 868, 874, 893, 947, 962]\n",
- "[388, 390, 393, 393, 394, 395, 395, 397, 400, 400]\n",
- "[127, 127, 127, 127, 127, 127, 127, 127, 127, 127]\n",
- "[532, 533, 538, 541, 546, 550, 556, 561, 563, 565]\n",
- "[136, 136, 136, 136, 137, 137, 137, 137, 137, 138]\n",
- "[424, 425, 426, 427, 427, 427, 427, 429, 431, 433]\n",
- "[346, 347, 348, 350, 350, 350, 351, 351, 352, 352]\n",
- "[283, 284, 284, 284, 285, 285, 285, 286, 288, 289]\n",
- "[131, 131, 131, 131, 131, 131, 132, 132, 132, 132]\n",
- "[402, 403, 403, 404, 404, 404, 405, 406, 407, 408]\n",
- "[129, 129, 129, 129, 129, 129, 129, 130, 130, 130]\n",
- "[199, 199, 199, 200, 200, 201, 201, 201, 201, 201]\n",
- "[463, 465, 467, 468, 469, 470, 474, 480, 481, 482]\n",
- "[82, 83, 83, 84, 84, 85, 87, 87, 87, 87]\n",
- "[173, 174, 174, 174, 174, 174, 174, 175, 175, 175]\n",
- "[195, 195, 196, 196, 196, 197, 198, 198, 198, 198]\n",
- "[224, 225, 225, 225, 226, 226, 227, 227, 228, 228]\n",
- "[62, 62, 63, 65, 66, 66, 66, 67, 68, 68]\n",
- "[704, 709, 731, 732, 744, 775, 780, 790, 796, 816]\n",
- "[104, 105, 105, 106, 107, 107, 107, 107, 107, 108]\n",
- "[162, 162, 163, 163, 163, 164, 164, 164, 164, 164]\n",
- "[202, 202, 202, 203, 204, 205, 206, 207, 207, 207]\n",
- "[217, 217, 217, 218, 218, 218, 218, 219, 219, 220]\n",
- "[99, 99, 101, 101, 101, 101, 102, 102, 102, 102]\n",
- "[253, 255, 257, 259, 260, 260, 261, 262, 263, 265]\n",
- "[148, 148, 149, 149, 150, 150, 150, 150, 150, 150]\n",
- "[124, 124, 124, 124, 124, 124, 124, 125, 125, 125]\n",
- "[311, 311, 311, 312, 312, 312, 313, 313, 313, 314]\n",
- "[247, 247, 247, 247, 250, 250, 250, 250, 251, 251]\n",
- "[236, 238, 238, 239, 239, 240, 241, 241, 241, 241]\n",
- "[507, 508, 509, 510, 512, 515, 520, 525, 525, 529]\n",
- "[208, 208, 209, 209, 210, 211, 211, 212, 213, 213]\n",
- "[164, 164, 164, 164, 164, 165, 165, 165, 166, 167]\n",
- "[153, 153, 153, 153, 153, 153, 154, 154, 155, 155]\n",
- "[244, 245, 245, 246, 246, 246, 246, 246, 246, 247]\n",
- "[161, 161, 161, 161, 161, 161, 162, 162, 162, 162]\n",
- "[120, 120, 120, 121, 121, 121, 122, 122, 122, 122]\n",
- "[111, 111, 111, 111, 111, 112, 112, 112, 112, 113]\n",
- "[190, 190, 191, 191, 191, 192, 192, 192, 193, 193]\n",
- "[144, 144, 144, 145, 145, 145, 145, 145, 146, 146]\n",
- "[46, 46, 47, 47, 47, 48, 48, 50, 51, 52]\n",
- "[155, 155, 156, 156, 156, 156, 156, 157, 157, 158]\n",
- "[187, 187, 188, 188, 188, 189, 189, 189, 190, 190]\n",
- "[220, 220, 220, 220, 220, 220, 220, 221, 221, 221]\n",
- "[306, 306, 306, 307, 307, 307, 308, 309, 309, 310]\n",
- "[108, 108, 109, 110, 110, 110, 110, 110, 111, 111]\n",
- "[142, 142, 142, 142, 143, 143, 143, 143, 143, 143]\n",
- "[156, 156, 156, 157, 157, 157, 157, 157, 157, 157]\n",
- "[851, 867, 894, 910, 916, 939, 977, 997, 1007, 1010]\n",
- "[451, 452, 453, 457, 458, 459, 461, 463, 465, 465]\n",
- "[201, 202, 203, 203, 203, 204, 204, 204, 204, 205]\n",
- "[229, 229, 231, 233, 233, 234, 234, 234, 234, 235]\n",
- "[172, 172, 172, 172, 173, 173, 173, 173, 173, 174]\n",
- "[124, 124, 124, 124, 125, 125, 125, 125, 125, 126]\n",
- "[223, 223, 224, 224, 225, 225, 226, 226, 229, 229]\n",
- "[215, 215, 217, 217, 217, 217, 218, 218, 219, 219]\n",
- "[277, 278, 278, 279, 280, 280, 281, 281, 282, 283]\n",
- "[140, 140, 140, 140, 140, 140, 141, 141, 141, 141]\n",
- "[311, 311, 312, 312, 313, 314, 314, 314, 315, 315]\n",
- "[148, 148, 148, 149, 149, 149, 149, 150, 150, 150]\n",
- "[258, 259, 259, 259, 259, 260, 261, 261, 261, 261]\n",
- "[637, 637, 640, 649, 652, 657, 665, 666, 667, 668]\n",
- "[272, 274, 274, 274, 274, 274, 274, 275, 275, 275]\n",
- "[264, 267, 267, 267, 268, 268, 269, 271, 271, 271]\n",
- "[150, 150, 150, 150, 151, 151, 151, 151, 151, 151]\n",
- "[139, 139, 139, 139, 139, 139, 139, 139, 140, 140]\n",
- "[178, 178, 178, 179, 179, 179, 179, 179, 179, 180]\n",
- "[370, 371, 374, 374, 377, 378, 378, 379, 384, 385]\n",
- "[559, 561, 570, 570, 575, 575, 585, 591, 592, 600]\n",
- "[111, 111, 111, 111, 111, 112, 112, 112, 113, 113]\n",
- "[261, 261, 262, 262, 262, 263, 264, 264, 264, 264]\n",
- "[180, 181, 181, 181, 181, 182, 182, 182, 182, 183]\n",
- "[239, 240, 240, 240, 241, 241, 241, 243, 243, 243]\n",
- "[185, 185, 185, 186, 187, 187, 187, 188, 188, 188]\n",
- "[121, 121, 121, 122, 122, 122, 122, 122, 122, 122]\n",
- "[145, 145, 145, 146, 146, 146, 146, 146, 146, 147]\n",
- "[474, 479, 480, 480, 480, 483, 484, 485, 486, 489]\n",
- "[315, 316, 316, 317, 319, 320, 320, 321, 321, 328]\n",
- "[329, 331, 334, 334, 335, 335, 335, 336, 337, 338]\n",
- "[152, 152, 152, 152, 153, 153, 153, 153, 154, 154]\n",
- "[244, 244, 245, 245, 246, 246, 246, 246, 246, 248]\n",
- "[196, 196, 196, 196, 197, 197, 198, 199, 199, 199]\n",
- "[670, 678, 680, 691, 700, 703, 712, 719, 726, 734]\n",
- "[174, 175, 175, 176, 176, 176, 177, 177, 178, 178]\n",
- "[101, 102, 103, 103, 103, 103, 103, 104, 104, 106]\n",
- "[141, 141, 141, 142, 142, 142, 142, 142, 142, 142]\n",
- "[132, 132, 132, 132, 132, 133, 133, 133, 133, 133]\n",
- "[513, 517, 518, 518, 525, 530, 531, 531, 552, 556]\n",
- "[294, 295, 295, 297, 298, 299, 299, 300, 303, 303]\n",
- "[122, 122, 122, 122, 123, 123, 123, 123, 123, 123]\n",
- "[249, 249, 250, 252, 253, 253, 254, 255, 257, 257]\n",
- "[134, 134, 134, 134, 134, 134, 134, 134, 135, 135]\n",
- "[70, 70, 71, 72, 73, 73, 74, 75, 76, 76]\n",
- "[183, 183, 184, 184, 184, 185, 185, 185, 185, 185]\n",
- "[119, 119, 119, 119, 119, 119, 120, 120, 121, 121]\n",
- "[143, 144, 144, 144, 144, 144, 144, 145, 145, 145]\n",
- "[88, 88, 88, 89, 89, 89, 90, 90, 90, 90]\n",
- "[54, 54, 54, 55, 56, 56, 56, 56, 56, 57]\n",
- "[64, 64, 65, 66, 66, 66, 66, 67, 67, 69]\n",
- "[164, 164, 164, 164, 165, 165, 165, 165, 166, 166]\n",
- "[107, 107, 108, 108, 108, 108, 108, 109, 109, 109]\n",
- "[166, 166, 167, 167, 168, 168, 168, 169, 169, 170]\n",
- "[130, 131, 131, 131, 131, 132, 132, 132, 132, 132]\n",
- "[129, 129, 129, 130, 130, 130, 130, 130, 130, 130]\n",
- "[97, 97, 98, 98, 100, 100, 101, 101, 101, 101]\n",
- "[137, 138, 138, 138, 138, 138, 138, 138, 138, 139]\n",
- "[170, 170, 170, 170, 170, 170, 170, 170, 171, 172]\n",
- "[209, 210, 210, 210, 210, 211, 211, 211, 211, 211]\n",
- "[135, 135, 135, 135, 136, 136, 136, 136, 137, 137]\n",
- "[191, 191, 191, 192, 193, 193, 194, 195, 195, 195]\n",
- "[82, 82, 82, 83, 83, 85, 85, 87, 87, 87]\n",
- "[106, 106, 106, 106, 106, 107, 107, 107, 107, 107]\n",
- "[50, 51, 51, 51, 52, 52, 52, 52, 53, 53]\n",
- "[117, 117, 117, 118, 118, 118, 118, 118, 118, 119]\n",
- "[128, 128, 128, 128, 129, 129, 129, 129, 129, 129]\n",
- "[199, 199, 199, 200, 200, 200, 200, 201, 201, 201]\n",
- "[739, 739, 750, 754, 758, 761, 802, 805, 821, 832]\n",
- "[490, 491, 494, 495, 497, 497, 498, 510, 510, 512]\n",
- "[188, 188, 189, 189, 189, 190, 190, 190, 190, 190]\n",
- "[154, 154, 154, 154, 155, 155, 155, 155, 155, 156]\n",
- "[109, 109, 109, 109, 110, 110, 110, 110, 111, 111]\n",
- "[147, 147, 147, 147, 147, 147, 148, 148, 148, 148]\n",
- "[235, 235, 236, 236, 236, 236, 237, 237, 239, 239]\n",
- "[91, 92, 93, 94, 95, 95, 95, 95, 95, 96]\n",
- "[113, 114, 114, 115, 115, 115, 115, 115, 116, 116]\n",
- "[161, 161, 161, 161, 161, 162, 163, 163, 163, 163]\n",
- "[358, 360, 363, 363, 364, 364, 365, 365, 366, 366]\n",
- "[416, 418, 419, 421, 425, 427, 427, 429, 432, 433]\n",
- "[212, 213, 213, 214, 214, 214, 214, 215, 215, 215]\n",
- "[600, 607, 615, 616, 617, 617, 622, 622, 629, 636]\n",
- "[303, 303, 303, 305, 306, 307, 309, 310, 310, 310]\n",
- "[339, 340, 340, 340, 344, 346, 346, 347, 351, 351]\n",
- "[126, 127, 127, 127, 127, 127, 128, 128, 128, 128]\n",
- "[433, 435, 439, 439, 440, 440, 445, 445, 445, 449]\n",
- "[205, 206, 206, 206, 207, 207, 208, 208, 209, 209]\n",
- "[287, 287, 288, 288, 289, 290, 291, 292, 293, 294]\n",
- "[351, 353, 353, 353, 355, 356, 356, 357, 357, 358]\n",
- "[77, 78, 78, 78, 78, 79, 79, 79, 82, 82]\n",
- "[219, 219, 220, 220, 221, 221, 221, 222, 222, 223]\n",
- "[385, 388, 392, 393, 398, 402, 406, 408, 412, 412]\n",
- "[468, 469, 470, 470, 471, 471, 471, 472, 473, 473]\n",
- "[158, 158, 158, 158, 158, 158, 158, 159, 159, 160]\n",
- "[116, 116, 116, 116, 117, 117, 117, 117, 117, 117]\n",
- "[57, 57, 58, 58, 58, 59, 60, 60, 62, 63]\n",
- "[12, 29, 36, 36, 39, 41, 42, 43, 43, 43]\n",
- "[44, 44, 44, 45, 46, 46, 47, 48, 48, 48]\n",
- "[715, 724, 727, 732, 755, 756, 768, 771, 779, 795]\n",
- "[291, 293, 293, 294, 295, 295, 296, 297, 297, 297]\n",
- "[160, 160, 161, 161, 161, 161, 161, 161, 162, 162]\n",
- "[102, 103, 104, 104, 105, 105, 105, 105, 106, 106]\n",
- "[263, 263, 264, 264, 265, 265, 265, 266, 266, 266]\n",
- "[165, 165, 165, 165, 165, 165, 166, 166, 167, 167]\n",
- "[230, 230, 230, 231, 231, 231, 231, 231, 232, 233]\n",
- "[800, 843, 853, 856, 866, 874, 874, 876, 884, 890]\n",
- "[49, 49, 50, 50, 51, 52, 52, 52, 53, 53]\n",
- "[247, 248, 248, 249, 249, 249, 249, 251, 252, 252]\n",
- "[185, 185, 185, 185, 185, 186, 186, 186, 186, 187]\n",
- "[268, 269, 269, 269, 270, 270, 270, 271, 271, 272]\n",
- "[84, 87, 89, 89, 90, 90, 90, 91, 91, 92]\n",
- "[318, 322, 323, 324, 324, 324, 324, 324, 325, 327]\n",
- "[581, 587, 588, 595, 601, 603, 605, 611, 612, 617]\n",
- "[138, 138, 138, 138, 138, 139, 139, 140, 140, 140]\n",
- "[298, 298, 299, 299, 300, 301, 303, 304, 304, 304]\n",
- "[140, 140, 140, 141, 141, 141, 141, 141, 141, 141]\n",
- "[218, 218, 219, 219, 220, 220, 220, 220, 221, 221]\n",
- "[366, 366, 368, 371, 372, 375, 375, 376, 377, 377]\n",
- "[151, 151, 151, 152, 152, 152, 152, 152, 153, 153]\n",
- "[334, 334, 334, 335, 337, 338, 338, 339, 344, 345]\n",
- "[548, 549, 552, 552, 554, 554, 555, 563, 574, 581]\n",
- "[163, 163, 163, 164, 164, 164, 164, 164, 165, 165]\n",
- "[119, 119, 120, 120, 121, 121, 121, 121, 121, 121]\n",
- "[214, 214, 214, 214, 214, 215, 215, 216, 216, 217]\n",
- "[128, 128, 129, 129, 129, 130, 130, 131, 131, 131]\n",
- "[62, 62, 63, 64, 64, 64, 67, 69, 69, 71]\n",
- "[205, 205, 205, 205, 206, 206, 206, 206, 207, 207]\n",
- "[901, 917, 924, 953, 964, 977, 978, 981, 995, 1074]\n",
- "[208, 208, 208, 210, 211, 211, 212, 212, 213, 213]\n",
- "[124, 124, 124, 124, 124, 125, 125, 125, 125, 125]\n",
- "[136, 136, 136, 136, 137, 137, 137, 137, 138, 138]\n",
- "[143, 143, 143, 143, 144, 144, 144, 144, 144, 144]\n",
- "[131, 131, 131, 131, 131, 131, 131, 132, 132, 132]\n",
- "[178, 179, 179, 180, 180, 181, 181, 181, 181, 181]\n",
- "[447, 447, 448, 449, 450, 452, 453, 454, 455, 455]\n",
- "[114, 114, 115, 115, 115, 115, 116, 116, 116, 116]\n",
- "[112, 112, 112, 112, 112, 113, 114, 114, 114, 114]\n",
- "[167, 167, 168, 168, 169, 169, 169, 169, 169, 170]\n",
- "[71, 72, 72, 74, 75, 75, 75, 76, 76, 76]\n",
- "[133, 133, 133, 133, 133, 133, 133, 134, 134, 134]\n",
- "[286, 287, 287, 287, 287, 287, 289, 290, 290, 290]\n",
- "[116, 116, 116, 117, 117, 117, 117, 117, 117, 117]\n",
- "[190, 190, 190, 191, 191, 192, 192, 192, 193, 193]\n",
- "[125, 125, 126, 126, 126, 126, 126, 126, 126, 126]\n",
- "[118, 118, 118, 119, 119, 119, 119, 119, 119, 119]\n",
- "[283, 283, 284, 284, 284, 285, 285, 285, 285, 285]\n",
- "[201, 201, 201, 201, 201, 202, 203, 204, 204, 204]\n",
- "[233, 234, 235, 236, 236, 236, 237, 238, 238, 239]\n",
- "[348, 350, 351, 351, 356, 359, 362, 363, 364, 364]\n",
- "[193, 194, 194, 194, 194, 194, 195, 195, 195, 195]\n",
- "[401, 403, 404, 406, 406, 409, 410, 411, 411, 413]\n",
- "[379, 381, 382, 384, 385, 389, 390, 399, 399, 401]\n",
- "[57, 57, 58, 59, 59, 60, 60, 60, 61, 61]\n",
- "[498, 501, 503, 504, 510, 519, 526, 527, 530, 547]\n",
- "[680, 681, 688, 692, 693, 696, 697, 708, 711, 714]\n",
- "[141, 142, 142, 142, 142, 142, 142, 142, 143, 143]\n",
- "[170, 170, 170, 171, 171, 172, 172, 172, 173, 174]\n",
- "[416, 417, 418, 418, 419, 421, 421, 423, 427, 427]\n",
- "[117, 117, 117, 117, 117, 118, 118, 118, 118, 118]\n",
- "[253, 253, 255, 257, 258, 258, 259, 262, 262, 262]\n",
- "[224, 224, 225, 225, 225, 225, 226, 226, 227, 227]\n",
- "[456, 457, 461, 462, 477, 479, 482, 483, 484, 486]\n",
- "[228, 228, 228, 228, 229, 229, 229, 230, 230, 230]\n",
- "[153, 153, 153, 153, 153, 154, 154, 154, 154, 154]\n",
- "[221, 221, 222, 222, 222, 222, 223, 223, 223, 224]\n",
- "[197, 198, 198, 199, 199, 200, 200, 200, 200, 200]\n",
- "[174, 174, 174, 177, 177, 178, 178, 178, 178, 178]\n",
- "[147, 148, 148, 148, 148, 149, 149, 149, 149, 149]\n",
- "[134, 134, 135, 135, 135, 135, 135, 135, 136, 136]\n",
- "[126, 127, 127, 127, 127, 127, 127, 128, 128, 128]\n",
- "[82, 82, 82, 83, 83, 83, 83, 84, 84, 84]\n",
- "[158, 158, 158, 158, 158, 158, 159, 159, 159, 160]\n",
- "[32, 33, 33, 34, 35, 38, 40, 40, 42, 42]\n",
- "[109, 109, 110, 110, 110, 110, 111, 111, 112, 112]\n",
- "[428, 429, 430, 433, 434, 436, 437, 441, 442, 446]\n",
- "[239, 240, 240, 241, 241, 241, 242, 242, 242, 242]\n",
- "[272, 272, 273, 273, 274, 274, 276, 277, 279, 282]\n",
- "[92, 93, 93, 94, 94, 94, 96, 96, 97, 97]\n",
- "[619, 622, 628, 640, 664, 671, 674, 674, 676, 679]\n",
- "[195, 195, 195, 196, 196, 196, 196, 196, 196, 196]\n",
- "[243, 244, 245, 246, 246, 246, 246, 247, 247, 247]\n",
- "[154, 155, 155, 155, 156, 156, 156, 156, 156, 156]\n",
- "[98, 99, 99, 100, 100, 101, 101, 102, 102, 102]\n",
- "[43, 43, 44, 44, 45, 46, 47, 47, 48, 49]\n",
- "[121, 121, 122, 122, 122, 122, 122, 122, 122, 123]\n",
- "[327, 327, 330, 330, 331, 331, 331, 332, 333, 334]\n",
- "[132, 132, 132, 132, 132, 132, 132, 133, 133, 133]\n",
- "[305, 308, 309, 309, 310, 310, 310, 311, 311, 311]\n",
- "[156, 156, 156, 156, 156, 157, 157, 157, 157, 157]\n",
- "[187, 187, 187, 187, 188, 188, 189, 189, 189, 189]\n",
- "[144, 145, 146, 146, 146, 147, 147, 147, 147, 147]\n",
- "[123, 123, 123, 123, 123, 123, 123, 123, 123, 124]\n",
- "[77, 77, 77, 77, 78, 78, 78, 79, 79, 82]\n",
- "[149, 150, 150, 150, 150, 150, 150, 151, 151, 151]\n",
- "[53, 53, 54, 54, 54, 54, 55, 55, 56, 57]\n",
- "[312, 312, 312, 313, 314, 314, 315, 315, 317, 317]\n",
- "[106, 107, 107, 107, 107, 107, 108, 108, 108, 108]\n",
- "[181, 182, 182, 182, 183, 183, 184, 184, 184, 185]\n",
- "[55, 55, 55, 56, 56, 56, 56, 57, 57, 57]\n",
- "[178, 179, 179, 179, 180, 180, 180, 180, 181, 181]\n",
- "[69, 70, 70, 70, 72, 72, 73, 73, 73, 73]\n",
- "[289, 290, 291, 291, 292, 292, 293, 294, 295, 295]\n",
- "[157, 157, 158, 158, 158, 158, 158, 159, 159, 159]\n",
- "[265, 266, 266, 266, 268, 269, 269, 270, 270, 270]\n",
- "[344, 344, 346, 346, 346, 346, 350, 352, 353, 359]\n",
- "[170, 171, 172, 172, 172, 173, 173, 173, 173, 173]\n",
- "[135, 135, 135, 136, 136, 136, 136, 136, 136, 136]\n",
- "[140, 140, 140, 140, 140, 141, 141, 141, 141, 141]\n",
- "[376, 376, 379, 387, 387, 389, 389, 390, 390, 391]\n",
- "[163, 164, 164, 164, 164, 164, 164, 164, 165, 165]\n",
- "[255, 256, 256, 257, 258, 260, 261, 261, 261, 262]\n",
- "[335, 336, 337, 338, 341, 341, 342, 342, 343, 344]\n",
- "[237, 237, 238, 238, 238, 238, 239, 239, 240, 241]\n",
- "[176, 177, 177, 177, 177, 177, 177, 178, 178, 178]\n",
- "[905, 966, 977, 989, 992, 996, 998, 1000, 1005, 1364]\n",
- "[296, 296, 297, 297, 297, 299, 300, 301, 301, 301]\n",
- "[104, 105, 105, 105, 106, 106, 107, 107, 107, 107]\n",
- "[127, 127, 127, 127, 127, 127, 127, 127, 127, 127]\n",
- "[61, 61, 61, 62, 62, 63, 64, 65, 65, 66]\n",
- "[97, 97, 98, 99, 101, 101, 102, 103, 103, 103]\n",
- "[27, 29, 31, 32, 36, 37, 39, 39, 39, 39]\n",
- "[582, 584, 586, 588, 590, 593, 595, 613, 619, 621]\n",
- "[226, 226, 226, 226, 227, 227, 227, 227, 227, 228]\n",
- "[48, 49, 49, 49, 50, 50, 50, 51, 52, 52]\n",
- "[88, 89, 89, 89, 89, 89, 90, 90, 92, 92]\n",
- "[39, 41, 42, 43, 43, 45, 45, 45, 46, 46]\n",
- "[466, 467, 467, 467, 467, 470, 481, 481, 486, 487]\n",
- "[138, 139, 139, 139, 139, 139, 139, 139, 139, 139]\n",
- "[262, 262, 262, 263, 263, 263, 263, 264, 264, 265]\n",
- "[304, 305, 305, 306, 307, 308, 308, 309, 310, 310]\n",
- "[409, 410, 412, 422, 423, 428, 429, 429, 430, 431]\n",
- "[431, 433, 434, 435, 437, 441, 442, 448, 451, 458]\n",
- "[173, 173, 173, 174, 174, 175, 175, 175, 175, 175]\n",
- "[113, 113, 113, 114, 114, 114, 114, 115, 115, 115]\n",
- "[123, 123, 123, 123, 123, 124, 124, 124, 124, 124]\n",
- "[191, 192, 192, 192, 192, 192, 193, 193, 194, 194]\n",
- "[183, 183, 183, 183, 184, 184, 184, 184, 184, 185]\n",
- "[144, 144, 144, 145, 145, 145, 145, 145, 146, 146]\n",
- "[360, 361, 361, 361, 363, 364, 364, 364, 365, 365]\n",
- "[82, 82, 82, 83, 84, 84, 84, 87, 88, 88]\n",
- "[136, 136, 136, 136, 136, 137, 137, 137, 137, 137]\n",
- "[73, 74, 74, 76, 78, 79, 79, 79, 79, 82]\n",
- "[93, 94, 94, 94, 94, 95, 95, 95, 97, 97]\n",
- "[67, 67, 67, 67, 68, 68, 68, 68, 69, 69]\n",
- "[57, 58, 58, 58, 58, 59, 59, 59, 59, 60]\n",
- "[199, 199, 199, 201, 202, 202, 203, 203, 203, 203]\n",
- "[185, 185, 185, 186, 186, 186, 186, 187, 187, 187]\n",
- "[124, 124, 124, 125, 125, 125, 125, 125, 125, 125]\n",
- "[126, 126, 126, 126, 126, 126, 126, 126, 126, 127]\n",
- "[153, 154, 154, 154, 154, 154, 154, 155, 155, 155]\n",
- "[161, 161, 162, 162, 162, 162, 162, 162, 162, 163]\n",
- "[210, 210, 211, 211, 211, 212, 212, 212, 212, 212]\n",
- "[120, 120, 120, 121, 121, 121, 122, 122, 122, 122]\n",
- "[130, 130, 131, 131, 131, 131, 132, 132, 132, 132]\n",
- "[283, 283, 283, 284, 285, 285, 287, 287, 288, 289]\n",
- "[806, 813, 815, 821, 824, 849, 850, 871, 877, 878]\n",
- "[555, 560, 561, 565, 570, 570, 571, 571, 575, 580]\n",
- "[189, 189, 189, 189, 189, 190, 190, 190, 190, 190]\n",
- "[624, 631, 635, 639, 648, 649, 656, 658, 672, 673]\n",
- "[156, 156, 156, 157, 157, 157, 157, 157, 157, 157]\n",
- "[165, 165, 165, 166, 166, 166, 166, 167, 167, 167]\n",
- "[229, 230, 230, 231, 231, 231, 233, 235, 237, 237]\n",
- "[247, 247, 248, 248, 249, 250, 250, 251, 252, 252]\n",
- "[159, 159, 159, 160, 160, 161, 161, 161, 161, 161]\n",
- "[149, 149, 149, 149, 149, 150, 150, 150, 150, 151]\n",
- "[271, 272, 274, 275, 275, 276, 277, 278, 279, 280]\n",
- "[252, 252, 253, 253, 253, 254, 254, 254, 255, 255]\n",
- "[128, 128, 128, 129, 129, 129, 129, 130, 130, 130]\n",
- "[206, 206, 207, 207, 207, 209, 209, 210, 210, 210]\n",
- "[220, 220, 220, 220, 221, 221, 221, 222, 223, 225]\n",
- "[365, 366, 366, 367, 368, 368, 368, 369, 370, 372]\n",
- "[146, 146, 146, 146, 146, 146, 146, 147, 147, 147]\n",
- "[215, 216, 216, 217, 218, 218, 218, 218, 219, 219]\n",
- "[142, 142, 142, 142, 143, 143, 143, 144, 144, 144]\n",
- "[491, 492, 495, 495, 499, 500, 503, 503, 517, 518]\n",
- "[194, 195, 195, 195, 195, 196, 196, 197, 198, 198]\n",
- "[147, 148, 148, 148, 148, 148, 148, 148, 149, 149]\n",
- "[117, 117, 117, 118, 118, 118, 118, 118, 118, 118]\n",
- "[151, 151, 152, 152, 152, 153, 153, 153, 153, 153]\n",
- "[241, 241, 242, 242, 245, 246, 246, 247, 247, 247]\n",
- "[111, 111, 111, 111, 111, 112, 112, 112, 112, 113]\n",
- "[132, 132, 133, 133, 133, 133, 133, 133, 133, 133]\n",
- "[119, 119, 119, 119, 119, 119, 119, 119, 119, 120]\n",
- "[187, 187, 187, 187, 187, 188, 188, 188, 189, 189]\n",
- "[518, 522, 523, 524, 532, 537, 540, 545, 554, 555]\n",
- "[213, 213, 213, 213, 213, 213, 214, 215, 215, 215]\n",
- "[168, 168, 169, 169, 169, 169, 169, 170, 170, 170]\n",
- "[137, 137, 137, 137, 138, 138, 138, 138, 138, 138]\n",
- "[133, 134, 134, 135, 135, 135, 135, 135, 135, 135]\n",
- "[204, 204, 204, 204, 204, 205, 205, 206, 206, 206]\n",
- "[673, 679, 683, 686, 688, 690, 691, 693, 706, 715]\n",
- "[127, 128, 128, 128, 128, 128, 128, 128, 128, 128]\n",
- "[323, 323, 326, 327, 328, 329, 331, 332, 333, 333]\n",
- "[392, 394, 395, 399, 399, 400, 404, 404, 407, 407]\n",
- "[107, 108, 108, 109, 110, 110, 110, 110, 111, 111]\n",
- "[115, 115, 115, 115, 116, 116, 116, 117, 117, 117]\n",
- "[311, 311, 315, 315, 316, 316, 317, 320, 321, 321]\n",
- "[724, 727, 730, 739, 761, 768, 785, 785, 786, 787]\n",
- "[95, 95, 95, 97, 97, 97, 98, 100, 100, 100]\n",
- "[190, 190, 190, 191, 192, 193, 194, 194, 194, 194]\n",
- "[166, 166, 166, 166, 166, 166, 166, 167, 167, 167]\n",
- "[336, 340, 340, 342, 345, 345, 346, 346, 347, 348]\n",
- "[170, 170, 170, 170, 171, 171, 171, 172, 172, 172]\n",
- "[357, 359, 360, 362, 363, 365, 366, 368, 368, 369]\n",
- "[143, 143, 143, 143, 143, 143, 143, 144, 144, 144]\n",
- "[159, 159, 159, 159, 160, 160, 160, 160, 161, 161]\n",
- "[43, 43, 44, 44, 44, 44, 45, 46, 46, 47]\n",
- "[218, 218, 219, 219, 219, 220, 221, 221, 221, 222]\n",
- "[321, 321, 321, 323, 323, 325, 325, 325, 326, 327]\n",
- "[174, 174, 175, 175, 175, 175, 176, 176, 176, 176]\n",
- "[117, 118, 118, 118, 118, 118, 118, 118, 118, 119]\n",
- "[164, 165, 165, 165, 165, 165, 165, 165, 165, 165]\n",
- "[111, 111, 111, 111, 111, 112, 112, 112, 113, 113]\n",
- "[198, 198, 198, 198, 199, 199, 199, 200, 200, 200]\n",
- "[73, 74, 74, 75, 76, 77, 78, 78, 78, 79]\n",
- "[155, 155, 155, 156, 156, 156, 156, 157, 157, 157]\n",
- "[328, 328, 329, 329, 330, 332, 333, 333, 334, 334]\n",
- "[370, 371, 372, 374, 376, 378, 379, 382, 383, 384]\n",
- "[172, 172, 172, 172, 173, 173, 173, 173, 173, 174]\n",
- "[144, 144, 144, 144, 144, 144, 145, 145, 145, 145]\n",
- "[754, 770, 779, 780, 793, 797, 807, 821, 874, 883]\n",
- "[108, 108, 108, 108, 109, 109, 109, 109, 110, 110]\n",
- "[128, 128, 128, 128, 129, 129, 129, 129, 129, 129]\n",
- "[187, 187, 187, 188, 189, 189, 189, 189, 189, 190]\n",
- "[119, 119, 120, 120, 121, 121, 121, 121, 121, 121]\n",
- "[135, 135, 135, 135, 135, 135, 135, 136, 136, 136]\n",
- "[401, 406, 408, 408, 408, 413, 414, 417, 418, 419]\n",
- "[244, 245, 245, 246, 247, 247, 247, 247, 248, 248]\n",
- "[69, 69, 69, 69, 70, 70, 71, 73, 73, 73]\n",
- "[440, 444, 446, 446, 447, 448, 448, 449, 455, 457]\n",
- "[63, 63, 63, 63, 64, 64, 65, 67, 67, 67]\n",
- "[79, 80, 82, 83, 83, 85, 85, 86, 86, 86]\n",
- "[148, 148, 149, 149, 149, 149, 150, 150, 150, 150]\n",
- "[122, 122, 123, 123, 123, 123, 123, 123, 124, 124]\n",
- "[86, 87, 88, 88, 88, 90, 91, 92, 92, 95]\n",
- "[153, 153, 154, 154, 154, 154, 154, 154, 154, 155]\n",
- "[917, 927, 965, 988, 996, 999, 1000, 1001, 1001, 1196]\n",
- "[127, 127, 127, 127, 127, 127, 127, 128, 128, 128]\n",
- "[157, 157, 158, 158, 158, 158, 158, 158, 158, 158]\n",
- "[177, 177, 177, 178, 178, 178, 178, 178, 178, 178]\n",
- "[146, 146, 146, 146, 146, 146, 147, 147, 147, 147]\n",
- "[384, 385, 385, 388, 390, 393, 394, 395, 396, 399]\n",
- "[621, 622, 625, 639, 645, 652, 665, 665, 671, 685]\n",
- "[275, 278, 278, 278, 281, 281, 281, 282, 282, 282]\n",
- "[208, 209, 209, 210, 210, 210, 211, 211, 212, 213]\n",
- "[580, 585, 590, 593, 594, 595, 602, 605, 612, 614]\n",
- "[180, 181, 181, 181, 182, 183, 183, 183, 184, 184]\n",
- "[688, 707, 714, 716, 730, 733, 745, 746, 747, 753]\n",
- "[167, 167, 168, 168, 169, 169, 169, 169, 169, 169]\n",
- "[302, 303, 304, 304, 305, 305, 306, 306, 307, 308]\n",
- "[113, 113, 113, 113, 113, 113, 114, 114, 114, 114]\n",
- "[213, 214, 214, 215, 215, 215, 215, 216, 216, 216]\n",
- "[134, 134, 134, 134, 134, 134, 134, 134, 134, 135]\n",
- "[121, 121, 121, 121, 122, 122, 122, 122, 122, 122]\n",
- "[201, 201, 201, 201, 202, 202, 202, 202, 203, 203]\n",
- "[464, 465, 466, 472, 472, 480, 484, 484, 485, 488]\n",
- "[51, 51, 53, 53, 53, 54, 54, 55, 57, 57]\n",
- "[162, 162, 162, 162, 163, 163, 163, 163, 164, 164]\n",
- "[223, 223, 224, 224, 224, 225, 226, 226, 227, 227]\n",
- "[129, 129, 129, 129, 130, 130, 130, 130, 130, 130]\n",
- "[261, 262, 262, 262, 263, 264, 264, 266, 266, 266]\n",
- "[283, 284, 288, 291, 291, 291, 292, 292, 293, 293]\n",
- "[104, 104, 104, 104, 104, 104, 105, 107, 107, 108]\n",
- "[100, 101, 102, 102, 102, 103, 103, 103, 103, 104]\n",
- "[140, 141, 141, 141, 141, 141, 142, 142, 142, 143]\n",
- "[126, 126, 126, 126, 126, 126, 126, 126, 126, 127]\n",
- "[47, 48, 49, 50, 50, 50, 50, 50, 50, 51]\n",
- "[178, 178, 179, 179, 179, 180, 180, 180, 180, 180]\n",
- "[231, 231, 231, 232, 233, 234, 235, 235, 235, 236]\n",
- "[131, 132, 132, 133, 133, 133, 133, 134, 134, 134]\n",
- "[267, 268, 269, 269, 269, 270, 270, 270, 271, 271]\n",
- "[294, 295, 296, 296, 298, 299, 300, 301, 301, 301]\n",
- "[136, 136, 136, 136, 137, 137, 137, 137, 137, 137]\n",
- "[151, 151, 151, 152, 152, 152, 152, 152, 153, 153]\n",
- "[236, 236, 236, 237, 237, 238, 239, 239, 241, 241]\n",
- "[242, 242, 242, 243, 243, 243, 243, 244, 244, 244]\n",
- "[195, 195, 195, 196, 196, 196, 196, 197, 197, 198]\n",
- "[185, 186, 186, 186, 186, 186, 186, 187, 187, 187]\n",
- "[252, 253, 254, 254, 256, 256, 259, 259, 260, 260]\n",
- "[124, 124, 124, 125, 125, 125, 125, 125, 125, 126]\n",
- "[130, 130, 131, 131, 131, 131, 131, 131, 131, 131]\n",
- "[348, 349, 351, 352, 352, 352, 352, 354, 354, 356]\n",
- "[60, 60, 61, 61, 61, 61, 61, 62, 62, 63]\n",
- "[137, 137, 138, 138, 138, 139, 139, 139, 139, 139]\n",
- "[271, 272, 272, 272, 273, 273, 274, 274, 274, 274]\n",
- "[227, 227, 228, 228, 229, 229, 229, 229, 230, 230]\n",
- "[147, 147, 147, 147, 148, 148, 148, 148, 148, 148]\n",
- "[203, 204, 204, 204, 204, 204, 204, 206, 206, 207]\n",
- "[114, 114, 114, 114, 114, 114, 115, 115, 115, 116]\n",
- "[139, 139, 139, 139, 139, 139, 140, 140, 140, 140]\n",
- "[528, 532, 537, 542, 557, 563, 565, 567, 567, 573]\n",
- "[250, 250, 251, 251, 251, 251, 251, 251, 251, 252]\n",
- "[28, 33, 39, 39, 40, 40, 41, 41, 42, 42]\n",
- "[496, 496, 502, 514, 517, 518, 520, 523, 525, 526]\n",
- "[420, 425, 425, 428, 428, 432, 436, 439, 439, 440]\n",
- "[116, 116, 116, 116, 117, 117, 117, 117, 117, 117]\n",
- "[58, 58, 58, 58, 58, 59, 59, 60, 60, 60]\n",
- "[309, 310, 310, 311, 313, 316, 317, 317, 320, 320]\n",
- "[99, 99, 100, 100, 101, 101, 102, 102, 103, 104]\n",
- "[117, 117, 117, 117, 117, 118, 118, 118, 118, 118]\n",
- "[581, 586, 587, 590, 591, 593, 597, 601, 611, 619]\n",
- "[137, 138, 138, 138, 138, 138, 138, 138, 138, 138]\n",
- "[47, 47, 47, 49, 49, 50, 51, 52, 54, 54]\n",
- "[457, 459, 460, 460, 461, 464, 466, 468, 473, 476]\n",
- "[138, 139, 139, 139, 139, 139, 140, 140, 140, 140]\n",
- "[156, 156, 156, 156, 156, 156, 156, 157, 157, 157]\n",
- "[169, 170, 170, 170, 171, 171, 171, 171, 172, 172]\n",
- "[55, 55, 57, 57, 57, 57, 57, 57, 58, 58]\n",
- "[363, 363, 366, 367, 370, 372, 375, 375, 376, 377]\n",
- "[59, 59, 60, 60, 60, 61, 61, 61, 62, 62]\n",
- "[254, 254, 255, 255, 256, 256, 256, 256, 256, 257]\n",
- "[341, 341, 341, 342, 344, 345, 351, 351, 351, 352]\n",
- "[143, 143, 143, 144, 144, 144, 144, 144, 144, 145]\n",
- "[157, 157, 157, 158, 158, 158, 158, 158, 159, 159]\n",
- "[108, 109, 109, 109, 110, 110, 110, 110, 110, 111]\n",
- "[131, 131, 131, 132, 132, 132, 132, 132, 132, 132]\n",
- "[145, 146, 146, 146, 146, 146, 146, 146, 147, 147]\n",
- "[478, 480, 486, 486, 493, 493, 495, 501, 508, 516]\n",
- "[125, 125, 125, 126, 126, 126, 127, 127, 127, 127]\n",
- "[124, 124, 124, 124, 124, 124, 125, 125, 125, 125]\n",
- "[353, 356, 357, 357, 357, 357, 358, 360, 361, 363]\n",
- "[245, 246, 246, 246, 246, 247, 248, 248, 249, 249]\n",
- "[63, 63, 63, 64, 64, 66, 67, 67, 67, 67]\n",
- "[121, 121, 121, 121, 122, 122, 122, 122, 122, 122]\n",
- "[104, 105, 105, 105, 105, 106, 107, 107, 107, 107]\n",
- "[189, 190, 190, 190, 191, 192, 192, 192, 193, 193]\n",
- "[420, 426, 426, 428, 428, 429, 432, 432, 433, 434]\n",
- "[135, 135, 136, 136, 136, 136, 136, 136, 137, 137]\n",
- "[975, 984, 986, 988, 993, 995, 999, 1010, 1522, 1601]\n",
- "[134, 134, 134, 134, 134, 135, 135, 135, 135, 135]\n",
- "[163, 164, 164, 164, 165, 165, 165, 166, 167, 167]\n",
- "[160, 160, 161, 161, 161, 162, 162, 163, 163, 163]\n",
- "[179, 179, 180, 180, 180, 180, 180, 180, 180, 181]\n",
- "[284, 284, 285, 286, 287, 288, 289, 289, 291, 292]\n",
- "[96, 96, 96, 96, 97, 97, 97, 97, 98, 99]\n",
- "[145, 145, 145, 145, 145, 145, 145, 145, 145, 145]\n",
- "[198, 199, 199, 199, 199, 199, 199, 200, 201, 201]\n",
- "[176, 176, 177, 177, 177, 178, 178, 178, 179, 179]\n",
- "[83, 83, 84, 85, 85, 87, 88, 88, 88, 90]\n",
- "[132, 132, 133, 133, 133, 133, 133, 133, 134, 134]\n",
- "[228, 229, 229, 230, 230, 230, 230, 231, 232, 232]\n",
- "[186, 187, 187, 187, 187, 188, 188, 188, 189, 189]\n",
- "[405, 406, 406, 409, 409, 409, 411, 413, 417, 417]\n",
- "[77, 80, 80, 81, 81, 82, 82, 83, 83, 83]\n",
- "[267, 268, 269, 271, 272, 272, 272, 272, 273, 276]\n",
- "[392, 393, 394, 395, 396, 396, 398, 400, 404, 404]\n",
- "[183, 183, 184, 184, 185, 185, 185, 186, 186, 186]\n",
- "[167, 167, 168, 168, 168, 169, 169, 169, 169, 169]\n",
- "[278, 278, 279, 279, 280, 281, 283, 283, 283, 284]\n",
- "[209, 211, 212, 212, 213, 213, 213, 213, 214, 214]\n",
- "[153, 153, 154, 154, 154, 154, 154, 154, 154, 155]\n",
- "[149, 150, 150, 150, 150, 150, 150, 150, 151, 151]\n",
- "[72, 73, 74, 75, 75, 75, 75, 76, 76, 77]\n",
- "[155, 155, 155, 155, 155, 155, 156, 156, 156, 156]\n",
- "[377, 377, 378, 379, 380, 385, 387, 388, 390, 392]\n",
- "[214, 214, 215, 215, 215, 215, 216, 217, 217, 217]\n",
- "[667, 669, 669, 674, 677, 688, 699, 700, 708, 711]\n",
- "[140, 140, 140, 140, 140, 140, 140, 140, 141, 141]\n",
- "[40, 41, 42, 43, 44, 44, 44, 45, 46, 46]\n",
- "[240, 241, 242, 242, 243, 243, 243, 244, 244, 244]\n",
- "[147, 147, 147, 148, 148, 148, 148, 148, 149, 149]\n",
- "[111, 111, 111, 111, 111, 111, 112, 112, 112, 112]\n",
- "[326, 327, 328, 331, 333, 335, 338, 340, 340, 341]\n",
- "[181, 181, 181, 182, 182, 182, 182, 182, 182, 183]\n",
- "[559, 560, 562, 566, 566, 567, 569, 569, 575, 576]\n",
- "[159, 159, 159, 159, 160, 160, 160, 160, 160, 160]\n",
- "[193, 193, 194, 194, 194, 195, 195, 196, 196, 197]\n",
- "[249, 249, 250, 250, 250, 253, 253, 253, 254, 254]\n",
- "[304, 307, 307, 307, 307, 308, 308, 308, 309, 310]\n",
- "[232, 232, 233, 233, 235, 236, 236, 237, 238, 239]\n",
- "[129, 129, 130, 130, 130, 130, 130, 130, 130, 131]\n",
- "[292, 292, 292, 294, 294, 295, 295, 295, 296, 297]\n",
- "[298, 298, 298, 298, 300, 300, 302, 303, 303, 303]\n",
- "[128, 128, 128, 128, 128, 129, 129, 129, 129, 129]\n",
- "[262, 262, 263, 263, 263, 263, 264, 264, 265, 266]\n",
- "[151, 151, 151, 152, 152, 152, 152, 152, 152, 153]\n",
- "[115, 115, 115, 116, 116, 116, 116, 117, 117, 117]\n",
- "[713, 716, 717, 719, 730, 751, 766, 773, 783, 791]\n",
- "[435, 435, 436, 436, 437, 439, 449, 452, 455, 456]\n",
- "[173, 173, 173, 174, 174, 174, 175, 175, 176, 176]\n",
- "[799, 801, 811, 816, 827, 827, 835, 837, 858, 873]\n",
- "[258, 259, 259, 260, 260, 260, 261, 261, 261, 262]\n",
- "[531, 537, 538, 539, 544, 545, 549, 550, 552, 557]\n",
- "[122, 122, 123, 123, 123, 123, 123, 123, 124, 124]\n",
- "[312, 312, 313, 314, 314, 314, 314, 316, 316, 318]\n",
- "[206, 206, 207, 207, 207, 207, 208, 208, 208, 209]\n",
- "[142, 142, 142, 143, 143, 143, 143, 143, 143, 143]\n",
- "[218, 220, 220, 220, 220, 220, 221, 221, 222, 222]\n",
- "[623, 623, 631, 639, 651, 657, 660, 661, 661, 663]\n",
- "[112, 113, 113, 113, 114, 114, 114, 115, 115, 115]\n",
- "[67, 68, 68, 68, 69, 69, 70, 70, 70, 70]\n",
- "[119, 119, 119, 119, 119, 119, 119, 120, 120, 121]\n",
- "[202, 203, 203, 203, 203, 204, 204, 205, 205, 205]\n",
- "[17, 24, 28, 32, 34, 36, 37, 37, 38, 38]\n",
- "[91, 91, 91, 92, 93, 93, 94, 94, 96, 96]\n",
- "[883, 898, 919, 923, 944, 953, 953, 968, 970, 974]\n",
- "[127, 127, 127, 127, 127, 128, 128, 128, 128, 128]\n",
- "[222, 222, 224, 224, 224, 226, 226, 226, 227, 228]\n",
- "[137, 137, 138, 138, 138, 138, 139, 139, 139, 139]\n",
- "[247, 247, 249, 249, 250, 251, 251, 252, 252, 252]\n",
- "[141, 141, 141, 142, 142, 142, 142, 142, 142, 142]\n",
- "[111, 111, 112, 112, 113, 113, 113, 113, 113, 113]\n",
- "[142, 142, 143, 143, 143, 143, 143, 144, 144, 144]\n",
- "[136, 136, 136, 136, 136, 136, 136, 137, 137, 137]\n",
- "[150, 150, 151, 151, 151, 151, 151, 151, 151, 151]\n",
- "[208, 208, 208, 208, 209, 209, 209, 210, 211, 211]\n",
- "[123, 123, 123, 123, 123, 123, 124, 124, 125, 125]\n",
- "[133, 133, 133, 134, 134, 134, 134, 134, 134, 134]\n",
- "[134, 134, 134, 135, 135, 135, 135, 135, 135, 135]\n",
- "[125, 125, 125, 125, 125, 125, 125, 125, 125, 125]\n",
- "[241, 242, 243, 244, 245, 245, 246, 246, 246, 247]\n",
- "[131, 131, 132, 132, 132, 132, 133, 133, 133, 133]\n",
- "[325, 326, 326, 326, 327, 328, 329, 329, 331, 335]\n",
- "[121, 121, 122, 122, 122, 122, 123, 123, 123, 123]\n",
- "[191, 191, 192, 193, 193, 193, 194, 194, 195, 195]\n",
- "[126, 126, 126, 126, 127, 127, 127, 127, 127, 127]\n",
- "[635, 639, 641, 667, 668, 670, 681, 691, 692, 699]\n",
- "[586, 588, 589, 595, 606, 609, 609, 618, 622, 632]\n",
- "[264, 264, 266, 267, 267, 267, 268, 268, 269, 270]\n",
- "[313, 314, 315, 315, 315, 316, 317, 318, 318, 320]\n",
- "[116, 116, 117, 117, 117, 117, 118, 118, 118, 118]\n",
- "[490, 493, 494, 495, 496, 500, 500, 500, 501, 501]\n",
- "[188, 188, 188, 189, 189, 189, 190, 190, 190, 190]\n",
- "[815, 817, 819, 825, 837, 838, 845, 858, 884, 891]\n",
- "[30, 34, 34, 35, 36, 38, 39, 39, 39, 40]\n",
- "[174, 174, 174, 174, 175, 175, 176, 176, 176, 176]\n",
- "[226, 226, 226, 226, 226, 227, 227, 227, 228, 228]\n",
- "[93, 93, 93, 95, 95, 96, 96, 97, 97, 97]\n",
- "[62, 64, 64, 65, 65, 65, 66, 66, 66, 66]\n",
- "[452, 455, 455, 456, 458, 458, 460, 461, 462, 466]\n",
- "[110, 110, 110, 110, 110, 110, 111, 111, 111, 111]\n",
- "[145, 146, 146, 147, 147, 147, 147, 147, 147, 148]\n",
- "[198, 198, 198, 198, 199, 199, 200, 200, 200, 200]\n",
- "[179, 179, 179, 180, 180, 181, 181, 182, 183, 183]\n",
- "[127, 128, 128, 128, 128, 128, 128, 129, 129, 129]\n",
- "[154, 155, 155, 155, 155, 155, 155, 155, 155, 155]\n",
- "[195, 196, 197, 197, 197, 197, 197, 198, 198, 198]\n",
- "[467, 467, 473, 475, 477, 478, 479, 479, 487, 487]\n",
- "[144, 144, 144, 144, 144, 144, 145, 145, 145, 145]\n",
- "[99, 100, 100, 100, 100, 100, 101, 101, 101, 101]\n",
- "[156, 156, 156, 157, 157, 157, 157, 157, 157, 158]\n",
- "[320, 321, 322, 322, 323, 323, 324, 324, 324, 325]\n",
- "[252, 253, 254, 255, 255, 258, 259, 259, 259, 260]\n",
- "[102, 102, 102, 102, 103, 103, 104, 104, 104, 105]\n",
- "[158, 158, 158, 159, 159, 159, 159, 159, 160, 160]\n",
- "[220, 224, 224, 225, 225, 225, 226, 226, 226, 226]\n",
- "[217, 218, 218, 218, 218, 218, 219, 219, 219, 220]\n",
- "[306, 307, 307, 309, 309, 310, 311, 312, 312, 313]\n",
- "[297, 298, 299, 299, 300, 300, 301, 302, 303, 305]\n",
- "[185, 186, 186, 187, 187, 187, 187, 187, 188, 188]\n",
- "[54, 54, 55, 56, 57, 57, 60, 60, 60, 61]\n",
- "[163, 163, 164, 164, 164, 164, 164, 164, 165, 165]\n",
- "[204, 204, 204, 205, 205, 206, 206, 206, 206, 206]\n",
- "[76, 76, 76, 76, 76, 76, 77, 78, 78, 78]\n",
- "[228, 230, 230, 231, 231, 231, 232, 233, 233, 233]\n",
- "[555, 556, 559, 564, 566, 567, 570, 572, 573, 583]\n",
- "[86, 87, 88, 89, 90, 90, 91, 91, 91, 92]\n",
- "[184, 184, 184, 184, 185, 185, 185, 185, 185, 185]\n",
- "[201, 201, 202, 202, 202, 202, 203, 204, 204, 204]\n",
- "[67, 67, 67, 67, 68, 69, 69, 69, 69, 69]\n",
- "[71, 71, 72, 73, 73, 74, 75, 75, 75, 76]\n",
- "[705, 717, 722, 736, 756, 765, 784, 786, 803, 811]\n",
- "[337, 339, 341, 342, 342, 343, 343, 344, 346, 348]\n",
- "[114, 115, 115, 115, 115, 115, 115, 115, 115, 116]\n",
- "[177, 177, 178, 178, 178, 178, 178, 179, 179, 179]\n",
- "[118, 119, 119, 119, 119, 119, 119, 120, 120, 120]\n",
- "[289, 289, 291, 291, 291, 292, 293, 293, 294, 297]\n",
- "[426, 428, 429, 432, 432, 433, 436, 439, 446, 451]\n",
- "[120, 120, 120, 120, 121, 121, 121, 121, 121, 121]\n",
- "[108, 108, 108, 108, 108, 109, 109, 109, 110, 110]\n",
- "[49, 49, 49, 50, 52, 53, 53, 54, 54, 54]\n",
- "[170, 170, 171, 171, 171, 171, 171, 171, 171, 171]\n",
- "[211, 213, 213, 213, 214, 214, 216, 217, 217, 217]\n",
- "[151, 152, 152, 152, 152, 152, 153, 154, 154, 154]\n",
- "[130, 130, 130, 130, 130, 130, 130, 130, 131, 131]\n",
- "[165, 165, 166, 166, 166, 167, 167, 168, 168, 168]\n",
- "[271, 271, 272, 273, 273, 274, 274, 274, 275, 277]\n",
- "[350, 351, 351, 351, 352, 352, 353, 353, 353, 356]\n",
- "[78, 79, 80, 82, 82, 84, 84, 84, 85, 86]\n",
- "[356, 357, 361, 367, 367, 370, 370, 370, 372, 373]\n",
- "[148, 149, 149, 149, 149, 149, 150, 150, 150, 150]\n",
- "[171, 171, 172, 172, 172, 172, 173, 173, 173, 174]\n",
- "[906, 910, 917, 961, 963, 973, 989, 991, 998, 1830]\n",
- "[278, 278, 278, 280, 280, 282, 282, 283, 283, 283]\n",
- "[374, 377, 379, 382, 383, 384, 388, 389, 392, 394]\n",
- "[502, 504, 505, 524, 529, 534, 544, 546, 548, 553]\n",
- "[234, 234, 235, 236, 238, 238, 240, 240, 240, 241]\n",
- "[114, 114, 114, 114, 114, 114, 114, 114, 114, 114]\n",
- "[42, 43, 43, 43, 44, 45, 46, 46, 47, 47]\n",
- "[284, 284, 285, 285, 285, 286, 287, 287, 287, 288]\n",
- "[139, 139, 140, 140, 140, 140, 140, 140, 141, 141]\n",
- "[160, 160, 160, 161, 161, 162, 162, 162, 162, 163]\n",
- "[260, 260, 261, 261, 262, 262, 262, 262, 263, 263]\n",
- "[394, 396, 400, 400, 401, 401, 407, 410, 410, 410]\n",
- "[105, 105, 106, 106, 107, 107, 107, 107, 108, 108]\n",
- "[411, 414, 416, 416, 416, 417, 419, 420, 422, 424]\n",
- "[168, 168, 169, 169, 169, 170, 170, 170, 170, 170]\n",
- "[129, 129, 129, 129, 129, 129, 129, 129, 129, 130]\n",
- "[170, 170, 170, 171, 171, 171, 172, 172, 172, 172]\n",
- "[93, 93, 94, 94, 94, 95, 95, 96, 96, 97]\n",
- "[492, 496, 497, 504, 506, 506, 508, 509, 510, 513]\n",
- "[121, 121, 121, 121, 121, 121, 121, 121, 121, 121]\n",
- "[228, 229, 229, 229, 229, 230, 230, 231, 231, 231]\n",
- "[308, 309, 309, 311, 312, 312, 314, 316, 316, 316]\n",
- "[154, 154, 154, 154, 154, 155, 155, 155, 155, 155]\n",
- "[243, 243, 244, 244, 245, 248, 248, 249, 249, 249]\n",
- "[152, 152, 153, 153, 153, 153, 153, 153, 153, 154]\n",
- "[232, 232, 232, 232, 233, 233, 234, 234, 234, 234]\n",
- "[515, 518, 518, 520, 520, 525, 525, 526, 531, 534]\n",
- "[156, 156, 156, 156, 156, 156, 156, 157, 157, 157]\n",
- "[179, 179, 179, 180, 180, 180, 180, 180, 180, 180]\n",
- "[106, 106, 107, 107, 108, 108, 108, 108, 108, 109]\n",
- "[250, 252, 252, 253, 253, 253, 254, 254, 255, 255]\n",
- "[196, 196, 196, 197, 197, 197, 197, 197, 197, 198]\n",
- "[112, 113, 113, 113, 113, 114, 114, 114, 115, 115]\n",
- "[58, 59, 60, 61, 62, 63, 64, 64, 65, 66]\n",
- "[238, 239, 239, 240, 241, 242, 242, 242, 243, 243]\n",
- "[345, 346, 346, 346, 348, 348, 350, 350, 350, 351]\n",
- "[175, 176, 176, 176, 176, 176, 176, 177, 177, 177]\n",
- "[212, 212, 212, 213, 214, 215, 215, 215, 215, 216]\n",
- "[129, 129, 129, 129, 129, 129, 129, 130, 130, 130]\n",
- "[941, 947, 976, 978, 994, 1001, 1002, 1070, 1147, 1475]\n",
- "[76, 77, 77, 79, 79, 81, 81, 81, 82, 82]\n",
- "[19, 25, 27, 28, 32, 38, 38, 40, 42, 43]\n",
- "[351, 356, 357, 357, 358, 359, 362, 364, 364, 365]\n",
- "[145, 145, 145, 145, 145, 146, 146, 146, 146, 146]\n",
- "[125, 125, 125, 125, 126, 126, 126, 126, 126, 126]\n",
- "[166, 166, 167, 167, 167, 167, 167, 167, 167, 168]\n",
- "[141, 141, 142, 142, 142, 142, 143, 143, 143, 143]\n",
- "[331, 334, 334, 335, 337, 340, 340, 342, 343, 344]\n",
- "[188, 188, 188, 188, 189, 189, 189, 189, 190, 190]\n",
- "[146, 147, 147, 147, 147, 148, 148, 148, 148, 149]\n",
- "[115, 115, 116, 116, 116, 117, 117, 117, 117, 117]\n",
- "[291, 291, 292, 293, 293, 295, 297, 298, 298, 299]\n",
- "[209, 209, 209, 209, 210, 211, 211, 211, 212, 212]\n",
- "[813, 823, 837, 845, 878, 889, 907, 930, 930, 933]\n",
- "[151, 151, 151, 151, 151, 152, 152, 152, 152, 152]\n",
- "[186, 186, 186, 187, 187, 187, 187, 188, 188, 188]\n",
- "[157, 157, 157, 157, 158, 158, 158, 158, 158, 159]\n",
- "[44, 47, 48, 49, 50, 51, 52, 53, 53, 53]\n",
- "[101, 102, 102, 102, 103, 103, 103, 103, 105, 106]\n",
- "[149, 149, 149, 149, 149, 149, 150, 150, 150, 150]\n",
- "[131, 131, 131, 131, 131, 131, 132, 132, 132, 132]\n",
- "[222, 223, 223, 224, 224, 225, 225, 225, 226, 226]\n",
- "[201, 201, 202, 202, 202, 203, 204, 204, 204, 205]\n",
- "[138, 138, 138, 138, 138, 139, 139, 139, 140, 140]\n",
- "[218, 219, 220, 220, 221, 221, 221, 222, 222, 222]\n",
- "[368, 369, 372, 374, 375, 375, 376, 376, 377, 377]\n",
- "[180, 181, 181, 182, 183, 183, 183, 183, 183, 183]\n",
- "[399, 399, 401, 402, 407, 408, 409, 410, 411, 413]\n",
- "[172, 172, 173, 173, 174, 174, 174, 174, 175, 175]\n",
- "[140, 140, 140, 140, 140, 140, 141, 141, 141, 141]\n",
- "[117, 117, 118, 118, 119, 119, 119, 119, 119, 119]\n",
- "[67, 67, 67, 68, 69, 69, 70, 70, 70, 70]\n",
- "[269, 272, 273, 274, 275, 282, 283, 283, 285, 286]\n",
- "[98, 98, 99, 99, 99, 99, 100, 101, 101, 101]\n",
- "[109, 109, 109, 109, 109, 109, 110, 110, 110, 110]\n",
- "[575, 579, 581, 584, 590, 592, 622, 625, 625, 628]\n",
- "[472, 474, 475, 482, 484, 485, 485, 486, 490, 491]\n",
- "[135, 136, 136, 136, 136, 136, 137, 137, 137, 137]\n",
- "[198, 198, 199, 199, 199, 199, 200, 200, 201, 201]\n",
- "[534, 535, 550, 551, 552, 554, 558, 559, 562, 570]\n",
- "[133, 133, 133, 133, 133, 134, 134, 134, 134, 135]\n",
- "[177, 177, 177, 178, 178, 178, 178, 178, 179, 179]\n",
- "[132, 132, 132, 132, 133, 133, 133, 133, 133, 133]\n",
- "[119, 120, 120, 120, 120, 120, 121, 121, 121, 121]\n",
- "[127, 127, 127, 128, 128, 128, 128, 128, 128, 129]\n",
- "[326, 326, 328, 328, 328, 328, 329, 330, 330, 331]\n",
- "[262, 262, 263, 264, 265, 266, 266, 267, 267, 269]\n",
- "[121, 122, 122, 122, 122, 122, 123, 123, 123, 123]\n",
- "[143, 143, 144, 144, 144, 144, 144, 144, 144, 145]\n",
- "[258, 258, 258, 258, 259, 259, 260, 260, 261, 261]\n",
- "[300, 300, 301, 303, 304, 305, 305, 306, 306, 307]\n",
- "[159, 159, 160, 160, 160, 160, 160, 160, 161, 161]\n",
- "[161, 161, 161, 162, 162, 162, 163, 163, 163, 163]\n",
- "[287, 288, 288, 288, 288, 289, 289, 290, 291, 291]\n",
- "[163, 163, 164, 164, 164, 164, 164, 165, 165, 166]\n",
- "[123, 123, 123, 123, 124, 124, 124, 125, 125, 125]\n",
- "[216, 216, 216, 216, 217, 217, 217, 217, 217, 218]\n",
- "[317, 317, 318, 318, 319, 320, 320, 320, 322, 323]\n",
- "[379, 381, 382, 383, 385, 387, 390, 393, 398, 398]\n",
- "[53, 53, 54, 55, 55, 55, 55, 56, 56, 58]\n",
- "[205, 206, 206, 206, 206, 206, 207, 208, 208, 208]\n",
- "[414, 414, 419, 419, 420, 422, 423, 424, 427, 428]\n",
- "[83, 85, 85, 85, 85, 86, 86, 87, 87, 87]\n",
- "[704, 706, 709, 713, 720, 787, 788, 791, 804, 804]\n",
- "[168, 169, 169, 169, 169, 169, 170, 170, 170, 170]\n",
- "[184, 184, 184, 185, 185, 185, 185, 186, 186, 186]\n",
- "[88, 89, 90, 90, 90, 90, 90, 92, 92, 92]\n",
- "[110, 110, 111, 111, 111, 111, 111, 111, 111, 112]\n",
- "[454, 457, 459, 460, 461, 465, 467, 470, 471, 471]\n",
- "[130, 130, 130, 130, 130, 130, 130, 130, 131, 131]\n",
- "[433, 435, 436, 440, 447, 448, 449, 451, 453, 453]\n",
- "[126, 126, 126, 127, 127, 127, 127, 127, 127, 127]\n",
- "[633, 634, 651, 657, 659, 679, 683, 690, 690, 691]\n",
- "[71, 71, 71, 71, 72, 72, 73, 73, 74, 74]\n",
- "[235, 235, 235, 236, 236, 237, 237, 238, 238, 238]\n",
- "[190, 190, 191, 191, 192, 192, 192, 193, 194, 195]\n",
- "[59, 61, 62, 62, 62, 63, 63, 64, 65, 65]\n",
- "[55, 55, 56, 56, 56, 57, 57, 58, 58, 59]\n",
- "[169, 169, 169, 171, 171, 171, 171, 171, 172, 172]\n",
- "[338, 338, 339, 339, 341, 341, 341, 341, 344, 344]\n",
- "[229, 229, 230, 230, 230, 231, 232, 233, 234, 234]\n",
- "[255, 255, 256, 256, 257, 257, 258, 258, 259, 261]\n",
- "[189, 190, 190, 190, 190, 191, 192, 192, 193, 193]\n",
- "[165, 165, 165, 166, 166, 166, 166, 166, 166, 166]\n",
- "[155, 155, 155, 156, 156, 156, 157, 157, 157, 157]\n",
- "[268, 268, 270, 270, 270, 271, 272, 272, 272, 274]\n",
- "[149, 149, 149, 149, 149, 150, 150, 150, 150, 150]\n",
- "[44, 44, 44, 44, 46, 46, 46, 47, 48, 49]\n",
- "[283, 284, 285, 285, 285, 285, 286, 288, 288, 289]\n",
- "[615, 617, 626, 637, 650, 655, 659, 664, 666, 671]\n",
- "[234, 234, 236, 236, 236, 237, 237, 237, 238, 238]\n",
- "[305, 306, 307, 307, 307, 307, 308, 308, 308, 308]\n",
- "[103, 104, 104, 105, 105, 105, 106, 106, 108, 108]\n",
- "[238, 238, 239, 239, 239, 239, 240, 241, 241, 241]\n",
- "[121, 122, 122, 122, 122, 122, 122, 123, 123, 123]\n",
- "[542, 543, 546, 560, 561, 561, 563, 574, 575, 581]\n",
- "[66, 67, 67, 67, 67, 67, 68, 69, 69, 69]\n",
- "[98, 98, 98, 98, 98, 99, 100, 101, 101, 103]\n",
- "[74, 74, 75, 76, 77, 77, 77, 78, 78, 78]\n",
- "[143, 143, 143, 143, 144, 144, 145, 145, 145, 145]\n",
- "[92, 93, 93, 94, 95, 96, 96, 96, 96, 97]\n",
- "[202, 203, 203, 203, 204, 204, 204, 205, 205, 205]\n",
- "[172, 172, 173, 173, 173, 174, 174, 174, 174, 175]\n",
- "[462, 463, 466, 468, 469, 469, 472, 475, 475, 477]\n",
- "[114, 114, 115, 115, 115, 115, 115, 115, 115, 116]\n",
- "[246, 246, 247, 247, 248, 249, 249, 249, 251, 251]\n",
- "[135, 135, 135, 135, 135, 135, 135, 136, 136, 136]\n",
- "[163, 163, 163, 163, 163, 164, 164, 164, 164, 165]\n",
- "[519, 520, 526, 528, 531, 531, 533, 534, 537, 541]\n",
- "[160, 160, 160, 160, 160, 161, 161, 162, 162, 162]\n",
- "[371, 372, 373, 373, 374, 375, 377, 379, 382, 383]\n",
- "[113, 113, 113, 113, 113, 113, 113, 113, 114, 114]\n",
- "[289, 291, 291, 291, 292, 292, 293, 293, 296, 296]\n",
- "[136, 136, 137, 137, 137, 138, 138, 138, 138, 138]\n",
- "[119, 119, 120, 120, 120, 120, 120, 120, 121, 121]\n",
- "[183, 183, 183, 184, 185, 187, 188, 188, 188, 188]\n",
- "[13, 21, 23, 23, 30, 31, 34, 35, 37, 38]\n",
- "[139, 139, 139, 139, 139, 139, 139, 140, 140, 140]\n",
- "[79, 79, 80, 80, 81, 81, 81, 81, 82, 83]\n",
- "[309, 309, 313, 313, 314, 314, 314, 315, 315, 315]\n",
- "[124, 124, 124, 124, 124, 125, 125, 125, 125, 126]\n",
- "[108, 108, 109, 109, 109, 110, 110, 110, 110, 110]\n",
- "[130, 130, 130, 130, 130, 130, 130, 130, 130, 130]\n",
- "[276, 276, 277, 277, 278, 278, 280, 280, 281, 283]\n",
- "[221, 221, 221, 223, 223, 223, 224, 224, 225, 225]\n",
- "[166, 166, 167, 167, 167, 167, 167, 169, 169, 169]\n",
- "[129, 129, 129, 129, 129, 129, 129, 129, 129, 129]\n",
- "[199, 200, 200, 200, 201, 201, 201, 201, 202, 202]\n",
- "[405, 407, 407, 410, 410, 412, 412, 413, 413, 414]\n",
- "[41, 41, 41, 42, 42, 42, 43, 43, 44, 44]\n",
- "[179, 179, 180, 181, 181, 182, 182, 183, 183, 183]\n",
- "[70, 71, 71, 71, 72, 72, 72, 72, 73, 73]\n",
- "[126, 126, 127, 127, 127, 127, 127, 127, 127, 127]\n",
- "[150, 151, 151, 151, 151, 151, 151, 151, 152, 152]\n",
- "[152, 152, 152, 153, 153, 154, 154, 154, 154, 155]\n",
- "[130, 130, 130, 131, 131, 131, 131, 131, 131, 131]\n",
- "[157, 157, 158, 158, 158, 158, 158, 158, 159, 160]\n",
- "[118, 118, 118, 118, 118, 119, 119, 119, 119, 119]\n",
- "[675, 681, 714, 715, 720, 732, 733, 736, 740, 743]\n",
- "[585, 591, 592, 594, 600, 608, 608, 612, 612, 613]\n",
- "[83, 84, 86, 86, 87, 89, 89, 90, 92, 92]\n",
- "[128, 128, 128, 128, 128, 128, 128, 128, 128, 129]\n",
- "[206, 208, 208, 208, 209, 209, 209, 209, 210, 210]\n",
- "[175, 175, 176, 176, 176, 177, 177, 177, 177, 177]\n",
- "[195, 196, 197, 198, 198, 198, 199, 199, 199, 199]\n",
- "[385, 386, 387, 389, 390, 391, 395, 396, 397, 399]\n",
- "[226, 226, 226, 227, 228, 228, 228, 228, 228, 228]\n",
- "[123, 123, 123, 123, 123, 123, 123, 123, 124, 124]\n",
- "[262, 263, 263, 264, 264, 264, 266, 266, 267, 267]\n",
- "[316, 317, 318, 320, 320, 320, 322, 322, 322, 323]\n",
- "[142, 142, 142, 142, 142, 142, 142, 143, 143, 143]\n",
- "[959, 961, 964, 975, 981, 982, 992, 1000, 1085, 1277]\n",
- "[133, 133, 133, 133, 133, 133, 133, 133, 133, 133]\n",
- "[147, 147, 147, 147, 148, 148, 148, 148, 149, 149]\n",
- "[145, 145, 145, 145, 146, 146, 146, 146, 146, 146]\n",
- "[361, 362, 362, 363, 366, 366, 367, 367, 368, 370]\n",
- "[251, 251, 252, 252, 253, 253, 253, 253, 254, 254]\n",
- "[243, 243, 243, 243, 244, 244, 245, 245, 246, 246]\n",
- "[116, 116, 117, 117, 117, 117, 117, 118, 118, 118]\n",
- "[216, 216, 217, 217, 217, 217, 217, 217, 218, 218]\n",
- "[415, 418, 420, 423, 425, 427, 429, 431, 436, 436]\n",
- "[193, 193, 193, 194, 194, 194, 195, 195, 195, 195]\n",
- "[110, 111, 111, 111, 111, 112, 112, 112, 112, 113]\n",
- "[49, 49, 50, 50, 51, 51, 51, 51, 53, 54]\n",
- "[347, 349, 349, 349, 351, 351, 351, 353, 354, 360]\n",
- "[747, 762, 785, 790, 885, 890, 903, 928, 938, 944]\n",
- "[219, 219, 219, 220, 220, 220, 220, 220, 220, 221]\n",
- "[133, 134, 134, 134, 134, 134, 134, 134, 134, 134]\n",
- "[436, 438, 438, 448, 451, 452, 455, 456, 458, 459]\n",
- "[211, 211, 211, 212, 212, 213, 213, 213, 215, 215]\n",
- "[177, 178, 178, 178, 179, 179, 179, 179, 179, 179]\n",
- "[131, 131, 131, 132, 132, 132, 132, 132, 132, 133]\n",
- "[296, 297, 298, 299, 299, 299, 300, 304, 304, 305]\n",
- "[140, 140, 140, 141, 141, 141, 141, 141, 141, 142]\n",
- "[324, 324, 329, 332, 334, 334, 335, 335, 335, 337]\n",
- "[483, 485, 486, 489, 492, 498, 504, 511, 515, 517]\n",
- "[144, 145, 145, 145, 145, 145, 145, 145, 145, 145]\n",
- "[117, 118, 118, 118, 118, 118, 118, 119, 119, 119]\n",
- "[184, 184, 185, 186, 186, 186, 186, 187, 187, 187]\n",
- "[141, 142, 142, 142, 142, 142, 142, 143, 143, 143]\n",
- "[635, 639, 656, 671, 677, 680, 685, 687, 693, 699]\n",
- "[197, 198, 198, 198, 199, 200, 200, 200, 201, 201]\n",
- "[140, 140, 140, 141, 141, 141, 141, 141, 141, 141]\n",
- "[469, 469, 470, 471, 476, 479, 481, 484, 486, 487]\n",
- "[82, 83, 83, 83, 83, 84, 84, 85, 85, 86]\n",
- "[316, 316, 319, 321, 322, 322, 324, 325, 325, 326]\n",
- "[115, 115, 115, 115, 116, 116, 117, 117, 117, 117]\n",
- "[258, 258, 260, 260, 261, 262, 263, 264, 265, 266]\n",
- "[247, 247, 247, 248, 248, 248, 248, 248, 248, 248]\n",
- "[49, 49, 50, 52, 52, 52, 55, 55, 56, 56]\n",
- "[130, 130, 130, 130, 130, 131, 131, 131, 131, 131]\n",
- "[211, 212, 212, 212, 212, 212, 212, 213, 213, 213]\n",
- "[135, 135, 136, 136, 137, 137, 137, 137, 137, 137]\n",
- "[193, 193, 193, 194, 194, 194, 194, 195, 195, 195]\n",
- "[327, 328, 329, 329, 329, 330, 331, 331, 332, 333]\n",
- "[101, 101, 102, 102, 102, 102, 103, 103, 103, 103]\n",
- "[156, 156, 156, 156, 157, 157, 157, 157, 157, 158]\n",
- "[114, 114, 114, 114, 114, 114, 114, 114, 114, 114]\n",
- "[163, 164, 164, 164, 165, 165, 165, 165, 165, 165]\n",
- "[151, 151, 151, 151, 151, 151, 151, 151, 152, 153]\n",
- "[145, 146, 146, 146, 146, 146, 147, 147, 148, 148]\n",
- "[254, 255, 256, 256, 256, 257, 257, 257, 257, 257]\n",
- "[64, 64, 64, 65, 65, 66, 68, 68, 69, 69]\n",
- "[169, 169, 170, 170, 170, 170, 170, 170, 170, 171]\n",
- "[154, 154, 154, 154, 155, 155, 155, 155, 155, 156]\n",
- "[785, 787, 794, 806, 813, 840, 846, 859, 900, 920]\n",
- "[201, 201, 201, 202, 202, 202, 203, 203, 203, 204]\n",
- "[119, 119, 119, 119, 119, 119, 119, 119, 119, 120]\n",
- "[235, 236, 236, 236, 236, 237, 238, 238, 241, 241]\n",
- "[42, 43, 44, 45, 45, 46, 46, 47, 48, 49]\n",
- "[181, 182, 182, 182, 182, 183, 183, 183, 184, 184]\n",
- "[363, 363, 364, 364, 365, 367, 369, 369, 370, 370]\n",
- "[174, 176, 176, 177, 177, 177, 177, 178, 178, 178]\n",
- "[548, 552, 558, 559, 565, 570, 578, 578, 580, 586]\n",
- "[122, 122, 122, 123, 123, 123, 123, 124, 124, 124]\n",
- "[204, 207, 207, 208, 208, 208, 209, 209, 210, 211]\n",
- "[103, 104, 104, 104, 105, 106, 106, 106, 106, 106]\n",
- "[297, 300, 300, 301, 302, 302, 303, 303, 304, 305]\n",
- "[138, 138, 138, 139, 139, 139, 139, 139, 140, 140]\n",
- "[422, 424, 426, 427, 427, 431, 435, 441, 446, 449]\n",
- "[86, 86, 88, 88, 89, 90, 91, 91, 91, 92]\n",
- "[120, 120, 120, 120, 121, 121, 121, 122, 122, 122]\n",
- "[372, 372, 372, 372, 375, 375, 375, 376, 376, 376]\n",
- "[96, 97, 97, 97, 98, 98, 98, 98, 100, 100]\n",
- "[160, 161, 161, 161, 161, 161, 162, 162, 162, 163]\n",
- "[590, 591, 597, 600, 607, 607, 615, 615, 629, 632]\n",
- "[377, 380, 380, 384, 388, 390, 390, 394, 395, 396]\n",
- "[69, 69, 69, 69, 70, 70, 71, 71, 71, 71]\n",
- "[343, 345, 345, 345, 346, 347, 348, 349, 350, 351]\n",
- "[279, 280, 280, 280, 280, 281, 281, 282, 283, 283]\n",
- "[128, 128, 128, 128, 128, 129, 129, 129, 129, 130]\n",
- "[449, 454, 456, 457, 458, 460, 461, 465, 466, 469]\n",
- "[14, 27, 33, 34, 37, 38, 40, 41, 42, 42]\n",
- "[165, 165, 165, 166, 166, 166, 166, 166, 166, 166]\n",
- "[178, 179, 179, 180, 180, 181, 181, 181, 181, 181]\n",
- "[266, 267, 268, 269, 269, 270, 270, 271, 272, 272]\n",
- "[283, 284, 284, 286, 287, 287, 287, 287, 287, 288]\n",
- "[109, 109, 109, 109, 110, 110, 111, 111, 112, 112]\n",
- "[353, 354, 354, 354, 354, 354, 357, 358, 359, 361]\n",
- "[226, 226, 226, 226, 227, 227, 228, 228, 228, 230]\n",
- "[528, 530, 531, 532, 533, 539, 542, 544, 546, 546]\n",
- "[158, 158, 159, 159, 159, 159, 160, 160, 160, 160]\n",
- "[167, 167, 168, 168, 168, 168, 169, 169, 169, 169]\n",
- "[148, 148, 148, 149, 149, 149, 149, 149, 149, 150]\n",
- "[107, 107, 108, 108, 108, 108, 108, 108, 108, 109]\n",
- "[126, 126, 126, 126, 127, 127, 127, 127, 127, 128]\n",
- "[409, 411, 413, 416, 417, 419, 420, 421, 421, 421]\n",
- "[288, 289, 289, 290, 294, 294, 295, 295, 295, 296]\n",
- "[273, 273, 275, 275, 275, 276, 276, 277, 277, 277]\n",
- "[133, 133, 133, 133, 133, 134, 134, 134, 134, 134]\n",
- "[150, 150, 150, 150, 150, 150, 150, 150, 150, 150]\n",
- "[190, 190, 190, 190, 191, 192, 192, 192, 192, 193]\n",
- "[400, 402, 402, 405, 405, 406, 407, 408, 409, 409]\n",
- "[143, 143, 143, 143, 144, 144, 144, 144, 144, 144]\n",
- "[112, 112, 112, 112, 113, 113, 113, 113, 113, 113]\n",
- "[306, 308, 309, 310, 310, 311, 313, 314, 314, 315]\n",
- "[230, 230, 230, 231, 233, 234, 235, 235, 235, 235]\n",
- "[171, 171, 172, 172, 172, 172, 173, 173, 173, 174]\n",
- "[219, 220, 220, 220, 221, 222, 222, 224, 225, 226]\n",
- "[153, 153, 153, 153, 153, 153, 153, 154, 154, 154]\n",
- "[214, 214, 215, 216, 217, 217, 218, 218, 218, 219]\n",
- "[131, 131, 131, 131, 132, 132, 132, 132, 133, 133]\n",
- "[334, 335, 335, 335, 335, 336, 337, 339, 340, 342]\n",
- "[242, 242, 242, 243, 244, 244, 245, 245, 246, 247]\n",
- "[929, 946, 951, 960, 972, 992, 995, 999, 1001, 1316]\n",
- "[187, 188, 188, 188, 188, 188, 188, 188, 189, 190]\n",
- "[195, 195, 195, 196, 196, 196, 196, 196, 197, 197]\n",
- "[124, 124, 124, 124, 125, 125, 125, 125, 126, 126]\n",
- "[249, 250, 251, 251, 252, 252, 253, 253, 253, 254]\n",
- "[77, 77, 78, 78, 79, 79, 79, 80, 80, 81]\n",
- "[137, 137, 137, 137, 137, 138, 138, 138, 138, 138]\n",
- "[93, 93, 93, 93, 94, 94, 95, 96, 96, 96]\n",
- "[72, 73, 73, 73, 76, 76, 76, 76, 76, 77]\n",
- "[497, 499, 507, 510, 511, 514, 517, 520, 525, 526]\n",
- "[56, 56, 58, 59, 62, 62, 62, 62, 63, 64]\n",
- "[700, 700, 702, 733, 745, 746, 750, 759, 766, 779]\n"
+ "Batch exmaples lengths: [3705, 3748, 3759, 3791, 3811, 3918, 3936, 4044, 4102, 4113]\n",
+ "Batch exmaples lengths: [1653, 1656, 1660, 1665, 1667, 1671, 1671, 1675, 1685, 1688]\n",
+ "Batch exmaples lengths: [444, 445, 445, 453, 461, 461, 463, 466, 468, 471]\n",
+ "Batch exmaples lengths: [979, 981, 981, 983, 983, 983, 985, 988, 989, 990]\n",
+ "Batch exmaples lengths: [3270, 3282, 3282, 3300, 3354, 3359, 3377, 3381, 3408, 3488]\n",
+ "Batch exmaples lengths: [2611, 2611, 2618, 2642, 2671, 2674, 2693, 2716, 2748, 2760]\n",
+ "Batch exmaples lengths: [508, 509, 510, 516, 517, 518, 520, 523, 535, 536]\n",
+ "Batch exmaples lengths: [1386, 1386, 1391, 1392, 1392, 1402, 1406, 1407, 1409, 1412]\n",
+ "Batch exmaples lengths: [672, 673, 673, 673, 673, 673, 674, 674, 675, 676]\n",
+ "Batch exmaples lengths: [302, 302, 303, 308, 309, 310, 313, 316, 318, 320]\n",
+ "Batch exmaples lengths: [2473, 2481, 2543, 2553, 2565, 2580, 2581, 2582, 2586, 2608]\n"
],
"name": "stdout"
}
]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {
+ "id": "1YnDV4IQWF2v"
+ },
+ "source": [
+ "## **Final Note**\n",
+ "\n",
+ "If you made it this far **Congrats!** 🎊 and **Thank you!** 🙏 for your interest in my tutorial!\n",
+ "\n",
+ "I've been using this code for a while now and I feel it got to a point where is nicely documented and easy to follow.\n",
+ "\n",
+ "Of course is easy for me to follow because I built it. That is why any feedback is welcome and it helps me improve my future tutorials!\n",
+ "\n",
+ "If you see something wrong please let me know by opening an issue on my [ml_things GitHub repository](https://github.com/gmihaila/ml_things/issues)!\n",
+ "\n",
+ "A lot of tutorials out there are mostly a one-time thing and are not being maintained. I plan on keeping my tutorials up to date as much as I can.\n",
+ "\n",
+ "## **Contact** 🎣\n",
+ "\n",
+ "🦊 GitHub: [gmihaila](https://github.com/gmihaila)\n",
+ "\n",
+ "🌐 Website: [gmihaila.github.io](https://gmihaila.github.io/)\n",
+ "\n",
+ "👔 LinkedIn: [mihailageorge](https://medium.com/r/?url=https%3A%2F%2Fwww.linkedin.com%2Fin%2Fmihailageorge)\n",
+ "\n",
+ "📬 Email: [georgemihaila@my.unt.edu.com](mailto:georgemihaila@my.unt.edu.com?subject=GitHub%20Website)"
+ ]
}
]
}
\ No newline at end of file