othertales committed on
Commit
ccc9353
·
verified ·
1 Parent(s): 1e784a8

Upload folder using huggingface_hub

Browse files
.gitattributes CHANGED
@@ -59,3 +59,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
59
  *.webm filter=lfs diff=lfs merge=lfs -text
60
  data/train.jsonl filter=lfs diff=lfs merge=lfs -text
61
  meta/discovery_cache/discovered_documents_20250928_195619.json filter=lfs diff=lfs merge=lfs -text
 
 
 
59
  *.webm filter=lfs diff=lfs merge=lfs -text
60
  data/train.jsonl filter=lfs diff=lfs merge=lfs -text
61
  meta/discovery_cache/discovered_documents_20250928_195619.json filter=lfs diff=lfs merge=lfs -text
62
+ data/tmpc343c9mc filter=lfs diff=lfs merge=lfs -text
63
+ data/tmpolt7cobs filter=lfs diff=lfs merge=lfs -text
configure.py ADDED
@@ -0,0 +1,235 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import argparse
2
+ import copy
3
+ import json
4
+ import logging
5
+ import sys
6
+ import tempfile
7
+ from pathlib import Path
8
+ from typing import Iterable, List, Tuple
9
+ import xml.etree.ElementTree as ET
10
+
11
+
12
LOGGER = logging.getLogger("configure")

# XML namespace prefixes used by legislation.gov.uk documents.
NS = {
    "leg": "http://www.legislation.gov.uk/namespaces/legislation",
    "ukm": "http://www.legislation.gov.uk/namespaces/metadata",
    "dc": "http://purl.org/dc/elements/1.1/",
    "dct": "http://purl.org/dc/terms/",
}


def enrich(record: dict) -> dict:
    """Populate *record* with metadata parsed from its ``xml_content`` field.

    The record is mutated in place and also returned for convenience. Every
    metadata key is always assigned (``None`` when absent from the XML) so all
    enriched records share a uniform schema.

    Args:
        record: A record dict containing at least an ``xml_content`` string.

    Returns:
        The same ``record`` object, with ``coming_into_force_date``,
        ``extent``, ``publication_date``, ``status`` and ``enacted_date`` set.

    Raises:
        xml.etree.ElementTree.ParseError: If ``xml_content`` is not well-formed XML.
        KeyError: If the record has no ``xml_content`` key.
    """
    root = ET.fromstring(record["xml_content"])

    record["coming_into_force_date"] = root.get("RestrictStartDate")
    extent = root.get("RestrictExtent")
    # RestrictExtent is a "+"-separated jurisdiction list, e.g. "E+W+S+N.I.".
    record["extent"] = extent.split("+") if extent else None

    # Fix: previously publication_date was only assigned when ukm:Metadata was
    # present, leaving some records without the key entirely. Assign it
    # unconditionally so the output schema is consistent.
    meta = root.find("ukm:Metadata", NS)
    valid = meta.find("dct:valid", NS) if meta is not None else None
    record["publication_date"] = valid.text if valid is not None else None

    # Keep a pre-existing status value when the XML carries none.
    status = root.find(".//ukm:PrimaryMetadata/ukm:DocumentClassification/ukm:DocumentStatus", NS)
    record["status"] = status.get("Value") if status is not None else record.get("status")

    enactment = root.find(".//ukm:PrimaryMetadata/ukm:EnactmentDate", NS)
    record["enacted_date"] = enactment.get("Date") if enactment is not None else None

    return record
41
+
42
+
43
+ def _iter_data_files(config_path: Path, splits: Iterable[str] | None = None) -> List[Tuple[str, Path]]:
44
+ with config_path.open("r", encoding="utf-8") as handle:
45
+ config = json.load(handle)
46
+
47
+ data_files = config.get("data_files", {})
48
+ if isinstance(data_files, list):
49
+ data_files = {"train": data_files}
50
+ elif isinstance(data_files, str):
51
+ data_files = {"train": data_files}
52
+
53
+ selected_splits = set(splits) if splits else set(data_files.keys())
54
+
55
+ resolved: List[Tuple[str, Path]] = []
56
+ for split, value in data_files.items():
57
+ if split not in selected_splits:
58
+ continue
59
+
60
+ if isinstance(value, list):
61
+ paths = value
62
+ else:
63
+ paths = [value]
64
+
65
+ for rel_path in paths:
66
+ resolved.append((split, (config_path.parent / rel_path).resolve()))
67
+
68
+ if not resolved:
69
+ raise ValueError("No data files found for the requested splits")
70
+
71
+ return resolved
72
+
73
+
74
def _configure_logging(verbose: bool) -> None:
    """Attach a single stderr handler to LOGGER, replacing any existing ones.

    DEBUG level when *verbose* is true, INFO otherwise. Propagation is
    disabled so records are not duplicated by the root logger.
    """
    formatter = logging.Formatter("%(levelname)s: %(message)s")
    stream_handler = logging.StreamHandler()
    stream_handler.setFormatter(formatter)
    LOGGER.setLevel(logging.DEBUG if verbose else logging.INFO)
    LOGGER.handlers.clear()
    LOGGER.addHandler(stream_handler)
    LOGGER.propagate = False
82
+
83
+
84
def _process_file(path: Path, *, dry_run: bool, strict: bool, limit: int | None) -> Tuple[int, int, int]:
    """Enrich every JSONL record in *path*, rewriting the file atomically.

    Records are enriched via :func:`enrich`; the output is written to a sibling
    temporary file which atomically replaces *path* on success, so a crash
    mid-run never leaves a half-written data file.

    Args:
        path: JSONL data file (one JSON object per line).
        dry_run: Parse and enrich but write nothing back.
        strict: Re-raise XML parse errors instead of keeping the original record.
        limit: Stop after this many lines; only valid together with *dry_run*.

    Returns:
        ``(total, changed, errors)`` — lines processed, records modified by
        enrichment, and XML parse failures.

    Raises:
        ValueError: If *limit* is given without *dry_run*.
        FileNotFoundError: If *path* does not exist.
        xml.etree.ElementTree.ParseError: In strict mode, on malformed XML.
    """
    total = 0
    changed = 0
    errors = 0

    if limit is not None and not dry_run:
        raise ValueError("--limit can only be used together with --dry-run")

    if not path.exists():
        raise FileNotFoundError(f"Data file not found: {path}")

    LOGGER.info(
        "Processing %s (dry_run=%s, strict=%s, limit=%s)",
        path,
        dry_run,
        strict,
        limit,
    )

    tmp_file = None
    tmp_path = None

    if not dry_run:
        # Created in the same directory so tmp_path.replace(path) stays on one
        # filesystem and is therefore atomic.
        tmp_file = tempfile.NamedTemporaryFile("w", delete=False, dir=str(path.parent), encoding="utf-8")
        tmp_path = Path(tmp_file.name)

    completed = False
    try:
        with path.open("r", encoding="utf-8") as reader:
            for line in reader:
                line = line.rstrip("\n")
                if not line:
                    # Preserve blank lines verbatim; they count toward total/limit.
                    if not dry_run and tmp_file is not None:
                        tmp_file.write("\n")
                    total += 1
                    if limit is not None and total >= limit:
                        break
                    continue

                record = json.loads(line)
                original = copy.deepcopy(record)

                try:
                    enriched = enrich(record)
                except ET.ParseError as exc:
                    errors += 1
                    if strict:
                        raise
                    # Best effort: keep the unmodified record and carry on.
                    enriched = original
                    LOGGER.warning("XML parse error in %s: %s", path, exc)

                if enriched != original:
                    changed += 1

                if LOGGER.isEnabledFor(logging.DEBUG):
                    LOGGER.debug(
                        "Processed record #%s (id=%s) – changed=%s",
                        total + 1,
                        record.get("id"),
                        enriched != original,
                    )

                if not dry_run and tmp_file is not None:
                    tmp_file.write(json.dumps(enriched))
                    tmp_file.write("\n")

                total += 1
                if limit is not None and total >= limit:
                    break
        completed = True
    finally:
        if tmp_file is not None:
            tmp_file.close()
        # Fix: remove the temp file when we bail out early (strict XML error,
        # bad JSON, I/O failure). Previously it was leaked into the data
        # directory — see the stray data/tmp* files committed alongside this
        # script.
        if tmp_path is not None and not completed:
            tmp_path.unlink(missing_ok=True)

    if not dry_run and tmp_path is not None:
        tmp_path.replace(path)

    return total, changed, errors
160
+
161
+
162
def main(argv: List[str] | None = None) -> int:
    """CLI entry point: enrich every configured data file with XML metadata.

    Args:
        argv: Argument list (defaults to ``sys.argv[1:]``).

    Returns:
        The process exit code: 0 on completion. In ``--strict`` mode an XML
        parse error propagates as an exception instead of an exit code.
    """
    parser = argparse.ArgumentParser(description="Enrich dataset records with metadata extracted from XML content")
    parser.add_argument(
        "--dataset-config",
        default="dataset_config.json",
        type=Path,
        help="Path to the dataset_config.json file (default: dataset_config.json)",
    )
    parser.add_argument(
        "--split",
        action="append",
        help="Specific dataset split(s) to process (default: all splits in the config)",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Run enrichment without writing changes back to disk",
    )
    parser.add_argument(
        "--limit",
        type=int,
        help="Only process the first N records (implies --dry-run)",
    )
    parser.add_argument(
        "--strict",
        action="store_true",
        help="Fail immediately on XML parse errors instead of skipping the affected records",
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        help="Enable verbose logging (includes per-file progress details)",
    )

    args = parser.parse_args(argv)

    # --limit is an inspection aid, so it always forces a dry run (and
    # _process_file rejects the combination otherwise).
    if args.limit is not None:
        args.dry_run = True

    _configure_logging(args.verbose)

    config_path: Path = args.dataset_config.resolve()

    files = _iter_data_files(config_path, args.split)

    overall_total = 0
    overall_changed = 0
    overall_errors = 0

    for split, data_path in files:
        total, changed, errors = _process_file(data_path, dry_run=args.dry_run, strict=args.strict, limit=args.limit)
        overall_total += total
        overall_changed += changed
        overall_errors += errors
        LOGGER.info(
            "Processed %s record(s) for split '%s' in %s – %s updated, %s error(s)",
            total,
            split,
            data_path,
            changed,
            errors,
        )

    if args.dry_run:
        LOGGER.info("Dry run – no files were modified")

    if overall_errors:
        LOGGER.warning("Completed with %s XML error(s)", overall_errors)

    # In strict mode a parse error raises inside _process_file and never
    # reaches this point, so completing the loop always means success. The
    # previous conditional expression here could only ever evaluate to 0.
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
data/tmpc343c9mc ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2ea701e6c926b958e8b37d26928aae1d1fcbec9aa9bbf94e805b9110c78b8f21
3
+ size 522429141
data/tmpolt7cobs ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:30f54a2ce88d5d9d22d8d50c5d39a485c2327fb3f850a2b677655bac8b091ea1
3
+ size 1095257920
data/train.jsonl CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:eac5375c52cce2121a3d95d0c0ecd4165f132a8c9b7a549e89a0a18686c732b2
3
- size 13119988636
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:68a673049dd74f529027e66ca0eb7bb1c30c3671efb43b429571406f61b4abd9
3
+ size 13120669031
uk_legislation_scraper.log ADDED
File without changes