Freeciv-3.3
Loading...
Searching...
No Matches
generate_packets.py
Go to the documentation of this file.
1#!/usr/bin/env python3
2
3#
4# Freeciv - Copyright (C) 2003 - Raimar Falke
5# This program is free software; you can redistribute it and/or modify
6# it under the terms of the GNU General Public License as published by
7# the Free Software Foundation; either version 2, or (at your option)
8# any later version.
9#
10# This program is distributed in the hope that it will be useful,
11# but WITHOUT ANY WARRANTY; without even the implied warranty of
12# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
13# GNU General Public License for more details.
14#
15
16# This script runs under Python 3.6 and up. Please leave it so.
17# It might also run under older versions, but no such guarantees are made.
18
19import re
20import argparse
21import sys
22from pathlib import Path
23from contextlib import contextmanager, ExitStack
24from functools import partial
25from itertools import chain, combinations, takewhile, zip_longest
26from enum import Enum
27from abc import ABC, abstractmethod
28
29try:
30 from functools import cache
31except ImportError:
32 from functools import lru_cache
33 cache = lru_cache(None)
34 del lru_cache
35
36import typing
37T_co = typing.TypeVar("T_co", covariant = True)
38
39
40
41
42def file_path(s: "str | Path") -> Path:
43 """Parse the given path and check basic validity."""
44 path = Path(s)
45
46 if path.is_reserved() or not path.name:
47 raise ValueError(f"not a valid file path: {s!r}")
48 if path.exists() and not path.is_file():
49 raise ValueError(f"not a file: {s!r}")
50
51 return path
52
53
55 """Contains configuration info for the script's execution, along with
56 functions closely tied to that configuration"""
57
58 def_paths: "list[Path]"
59 """Paths to definition files, in load order"""
60 common_header_path: "Path | None"
61 """Output path for the common header, or None if that should not
62 be generated"""
63 common_impl_path: "Path | None"
64 """Output path for the common implementation, or None if that
65 should not be generated"""
66 server_header_path: "Path | None"
67 """Output path for the server header, or None if that should not
68 be generated"""
69 server_impl_path: "Path | None"
70 """Output path for the server implementation, or None if that
71 should not be generated"""
72 client_header_path: "Path | None"
73 """Output path for the client header, or None if that should not
74 be generated"""
75 client_impl_path: "Path | None"
76 """Output path for the client implementation, or None if that
77 should not be generated"""
78
79 verbose: bool
80 """Whether to enable verbose logging"""
81 lazy_overwrite: bool
82 """Whether to lazily overwrite output files"""
83
84 gen_stats: bool
85 """Whether to generate delta stats code"""
86 log_macro: "str | None"
87 """The macro used for log calls, or None if no log calls should
88 be generated"""
89
90 fold_bool: bool
91 """Whether to fold boolean fields into the packet header"""
92
93 @staticmethod
94 def get_argparser() -> argparse.ArgumentParser:
95 """Construct an argument parser for a packet generation script"""
96 parser = argparse.ArgumentParser(
97 description = "Generate packet-related code from packets.def",
98 add_help = False, # we'll add a help option explicitly
99 )
100
101 # Argument groups
102 # Note the order:
103 # We want the path arguments to show up *first* in the help text
104
105 paths = parser.add_argument_group(
106 "Input and output paths",
107 "The following parameters decide which files to read and write."
108 " Omitting an output path will not generate that file.",
109 )
110
111 script = parser.add_argument_group(
112 "Script configuration",
113 "The following parameters change how the script operates.",
114 )
115
116 output = parser.add_argument_group(
117 "Output configuration",
118 "The following parameters change the amount of output.",
119 )
120
121 protocol = parser.add_argument_group(
122 "Protocol configuration",
123 "The following parameters CHANGE the protocol."
124 " You have been warned.",
125 )
126
127 # Individual arguments
128 # Note the order:
129 # We want the path arguments to show up *last* in the usage summary
130
131 script.add_argument("-h", "--help", action = "help",
132 help = "show this help message and exit")
133
134 script.add_argument("-v", "--verbose", action = "store_true",
135 help = "enable log messages during code generation")
136
137 # When enabled: Only overwrite existing output files when they
138 # actually changed. This prevents the build system from rebuilding all
139 # dependents in cases where that wouldn't even be necessary.
140 script.add_argument("--lazy-overwrite", action = "store_true",
141 help = "only overwrite output files when their"
142 " contents actually changed")
143
144 output.add_argument("-s", "--gen-stats", action = "store_true",
145 help = "generate code reporting packet usage"
146 " statistics; call delta_stats_report to get these")
147
148 logs = output.add_mutually_exclusive_group()
149 logs.add_argument("-l", "--log-macro", default = "log_packet_detailed",
150 help = "use the given macro for generated log calls")
151 logs.add_argument("-L", "--no-logs", dest = "log_macro",
152 action = "store_const", const = None,
153 help = "disable generating log calls")
154
155 protocol.add_argument("-B", "--no-fold-bool",
156 dest = "fold_bool", action = "store_false",
157 help = "explicitly encode boolean values in the"
158 " packet body, rather than folding them into the"
159 " packet header")
160
161 output_path_args = (
162 # (dest, option, canonical path)
163 ("common_header_path", "--common-h", "common/packets_gen.h"),
164 ("common_impl_path", "--common-c", "common/packets_gen.c"),
165 ("client_header_path", "--client-h", "client/packhand_gen.h"),
166 ("client_impl_path", "--client-c", "client/packhand_gen.c"),
167 ("server_header_path", "--server-h", "server/hand_gen.h"),
168 ("server_impl_path", "--server-c", "server/hand_gen.c"),
169 )
170
171 for dest, option, canonical in output_path_args:
172 paths.add_argument(option, dest = dest, type = file_path,
173 help = f"output path for {canonical}")
174
175 paths.add_argument("def_paths", metavar = "def_path",
176 nargs = "+", type = file_path,
177 help = "paths to your packets.def file")
178
179 return parser
180
181 def __init__(self, args: "typing.Sequence[str] | None" = None):
182 __class__.get_argparser().parse_args(args, namespace = self)
183
184 def log_verbose(self, *args):
185 """Print the given arguments iff verbose logging is enabled"""
186 if self.verbose:
187 print(*args)
188
189 @property
190 def _root_path(self) -> "Path | None":
191 """Root Freeciv path, if we can find it."""
192 path = Path(__file__).absolute()
193 root = path.parent.parent
194 if path != root / "common" / "generate_packets.py":
195 self.log_verbose("Warning: couldn't find Freeciv root path")
196 return None
197 return root
198
199 def _relative_path(self, path: Path) -> Path:
200 """Find the relative path from the Freeciv root to the given path.
201 Return the path unmodified if it's outside the Freeciv root, or if
202 the Freeciv root could not be found."""
203 root = self._root_path
204 if root is not None:
205 try:
206 return path.absolute().relative_to(root)
207 except ValueError:
208 self.log_verbose(f"Warning: path {path} outside of Freeciv root")
209 return path
210
211 @property
212 def _script_path(self) -> Path:
213 """Relative path of the executed script. Under normal circumstances,
214 this will be common/generate_packets.py, but it may differ when this
215 module is imported from another script."""
216 return self._relative_path(Path(sys.argv[0]))
217
218 def _write_disclaimer(self, f: typing.TextIO):
219 f.write(f"""\
220 /**************************************************************************
221 * THIS FILE WAS GENERATED *
222 * Script: {self._script_path!s:63} *
223""")
224
225 for path in self.def_paths:
226 f.write(f"""\
227 * Input: {self._relative_path(path)!s:63} *
228""")
229
230 f.write(f"""\
231 * DO NOT CHANGE THIS FILE *
232 **************************************************************************/
233
234""")
235
236 @contextmanager
237 def _wrap_header(self, file: typing.TextIO, header_name: str) -> typing.Iterator[None]:
238 """Add multiple inclusion protection to the given file"""
239 name = f"FC__{header_name.upper()}_H"
240 file.write(f"""\
241#ifndef {name}
242#define {name}
243
244""")
245
246 yield
247
248 file.write(f"""\
249
250#endif /* {name} */
251""")
252
253 @contextmanager
254 def _wrap_cplusplus(self, file: typing.TextIO) -> typing.Iterator[None]:
255 """Add code for `extern "C" {}` wrapping"""
256 file.write(f"""\
257#ifdef __cplusplus
258extern "C" {{
259#endif /* __cplusplus */
260
261""")
262 yield
263 file.write(f"""\
264
265#ifdef __cplusplus
266}}
267#endif /* __cplusplus */
268""")
269
270 @contextmanager
271 def open_write(self, path: "str | Path", wrap_header: "str | None" = None, cplusplus: bool = True) -> typing.Iterator[typing.TextIO]:
272 """Open a file for writing and write disclaimer.
273
274 If enabled, lazily overwrites the given file.
275 If wrap_header is given, add multiple inclusion protection; if
276 cplusplus is also given (default), also add code for `extern "C"`
277 wrapping."""
278 path = Path(path) # no-op if path is already a Path object
279 self.log_verbose(f"writing {path}")
280
281 with ExitStack() as stack:
282 if self.lazy_overwrite:
283 file = stack.enter_context(self.lazy_overwrite_open(path))
284 else:
285 file = stack.enter_context(path.open("w"))
286
287 self._write_disclaimer(file)
288
289 if wrap_header is not None:
290 stack.enter_context(self._wrap_header(file, wrap_header))
291 if cplusplus:
292 stack.enter_context(self._wrap_cplusplus(file))
293 yield file
294 self.log_verbose(f"done writing {path}")
295
296 @contextmanager
297 def lazy_overwrite_open(self, path: "str | Path", suffix: str = ".tmp") -> typing.Iterator[typing.TextIO]:
298 """Open a file for writing, but only actually overwrite it if the new
299 content differs from the old content.
300
301 This creates a temporary file by appending the given suffix to the given
302 file path. In the event of an error, this temporary file might remain in
303 the target file's directory."""
304
305 path = Path(path)
306 tmp_path = path.with_name(path.name + suffix)
307
308 # if tmp_path already exists, assume it's left over from a previous,
309 # failed run and can be overwritten without trouble
310 self.log_verbose(f"lazy: using {tmp_path}")
311 with tmp_path.open("w") as file:
312 yield file
313
314 if path.exists() and files_equal(tmp_path, path):
315 self.log_verbose("lazy: no change, deleting...")
316 tmp_path.unlink()
317 else:
318 self.log_verbose("lazy: content changed, replacing...")
319 tmp_path.replace(path)
320
321
322################### General helper functions and classes ###################
323
324def files_equal(path_a: "str | Path", path_b: "str | Path") -> bool:
325 """Return whether the contents of two text files are identical"""
326 with open(path_a) as file_a, open(path_b) as file_b:
327 return all(a == b for a, b in zip_longest(file_a, file_b))
328
329# Taken from https://docs.python.org/3.4/library/itertools.html#itertools-recipes
330def powerset(iterable: typing.Iterable[T_co]) -> "typing.Iterator[tuple[T_co, ...]]":
331 "powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
332 s = list(iterable)
333 return chain.from_iterable(combinations(s, r) for r in range(len(s)+1))
334
335INSERT_PREFIX_PATTERN: "typing.Final" = re.compile(r"^(?!#|$)", re.MULTILINE)
336"""Matches the beginning of nonempty lines that are not preprocessor
337directives, i.e. don't start with a #"""
338
339def prefix(prefix: str, text: str) -> str:
340 """Prepend prefix to every line of text, except blank lines and those
341 starting with #"""
342 return INSERT_PREFIX_PATTERN.sub(prefix, text)
343
344
345class Location:
346 """Roughly represents a location in memory for the generated code;
347 outside of recursive field types like arrays, this will usually just be
348 a field of a packet, but it serves to concisely handle the recursion."""
349
350 # placeholder that will clearly be an error if it accidentally
351 # shows up in generated code
352 _PACKET = "#error gen_packet$"
353 _INDICES = "ijk"
354
355 name: str
356 """The name associated with this location; used in log messages."""
357 _location: str
358 """The actual location as used in code, including placeholders for
359 where the packet name goes"""
360 depth: int
361 """The array nesting depth of this location; used to determine index
362 variable names."""
363 json_depth: int
364 """The total sub-location nesting depth of the JSON field address
365 for this location"""
366
367 def __init__(self, name: str, *, location: "str | None" = None,
368 depth: int = 0, json_depth: "int | None" = None):
369 self.name = name
370 self._location = location if location is not None else self._PACKET + name
371 self.depth = depth
372 self.json_depth = json_depth if json_depth is not None else depth
373
374 @classmethod
375 @cache
376 def constant(cls, location: str, name: str = "(unknown)") -> "Location":
377 """Construct a Location not dependent on the packet"""
378 assert cls._PACKET not in location
379 return cls(name, location = location)
380
381 def replace(self, new_location: str) -> "Location":
382 """Return the given string as a new Location with the same metadata
383 as self"""
384 return type(self)(
385 name = self.name,
386 location = new_location,
387 depth = self.depth,
388 json_depth = self.json_depth,
389 )
390
391 def deeper(self, new_location: str, json_step: int = 1) -> "Location":
392 """Return the given string as a new Location with the same name as
393 self and incremented depth"""
394 return type(self)(
395 name = self.name,
396 location = new_location,
397 depth = self.depth + 1,
398 json_depth = self.json_depth + json_step,
399 )
400
401 def sub_full(self, json_step: int = 1) -> "Location":
402 """Like self.sub, but with the option to step the JSON nesting
403 depth by a different amount."""
404 return self.deeper(f"{self}[{self.index}]", json_step)
405
406 @property
407 def index(self) -> str:
408 """The index name for the current depth"""
409 if self.depth > len(self._INDICES):
410 return self._INDICES[0] + str(self.depth) # i3, i4, i5...
411 return self._INDICES[self.depth]
412
413 @property
414 def sub(self) -> "Location":
415 """A location one level deeper with the current index subscript
416 added to the end.
417
418 `field` ~> `field[i]` ~> `field[i][j]` etc."""
419 return self.sub_full()
420
421 @property
422 def json_subloc(self) -> str:
423 """The plocation (JSON field address) to the sub-location
424 of this location's corresponding field address"""
425 return "field_addr.sub_location" + self.json_depth * "->sub_location"
426
427 def __matmul__(self, packet: "str | None") -> str:
428 """self @ packet
429 Code fragment of this location in the given packet, or in local
430 variables if packet is None"""
431 packet = f"{packet}->" if packet is not None else ""
432 return self._location.replace(self._PACKET, packet)
433
434 def __str__(self) -> str:
435 return self._location
436
437 def __repr__(self) -> str:
438 return f"<{type(self).__name__} {self.name}(depth={self.depth}, json_depth={self.json_depth}) {self @ 'PACKET'}>"
439
440 def __eq__(self, other) -> bool:
441 if other is self:
442 return True
443 if not isinstance(other, __class__):
444 return NotImplemented
445 return (
446 self.name == other.name
447 and self._location == other._location
448 and self.depth == other.depth
449 and self.json_depth == other.json_depth
450 )
451
452 def __hash__(self) -> int:
453 return hash((__class__, self.name, self._location, self.depth, self.json_depth))
454
455
456#################### Components of a packets definition ####################
457
458class FieldFlags:
459 """Information about flags of a given Field. Multiple Field objects can
460 share one FieldFlags instance, e.g. when defined on the same line."""
461
462 ADD_CAP_PATTERN = re.compile(r"^add-cap\‍(([^()]+)\‍)$")
463 """Matches an add-cap flag (optional capability)"""
464
465 REMOVE_CAP_PATTERN = re.compile(r"^remove-cap\‍(([^()]+)\‍)$")
466 """Matches a remove-cap flag (optional capability)"""
467
468 is_key: bool = False
469 """Whether the field is a key field"""
470
471 diff: bool = False
472 """Whether the field should be deep-diffed for transmission"""
473
474 add_caps: "set[str]"
475 """The capabilities required to enable the field"""
476
477 remove_caps: "set[str]"
478 """The capabilities that disable the field"""
479
480 @classmethod
481 @cache
482 def parse(cls, flags_text: str) -> "FieldFlags":
483 """Parse a FieldFlags object from a comma-separated flag line"""
484 return cls(
485 stripped
486 for flag in flags_text.split(",")
487 for stripped in (flag.strip(),)
488 if stripped
489 )
490
491 def __init__(self, flag_texts: typing.Iterable[str]):
492 self.add_caps = set()
493 self.remove_caps = set()
494
495 for flag in flag_texts:
496 if flag == "key":
497 self.is_key = True
498 continue
499 if flag == "diff":
500 self.diff = True
501 continue
502 mo = __class__.ADD_CAP_PATTERN.fullmatch(flag)
503 if mo is not None:
504 self.add_caps.add(mo.group(1))
505 continue
506 mo = __class__.REMOVE_CAP_PATTERN.fullmatch(flag)
507 if mo is not None:
508 self.remove_caps.add(mo.group(1))
509 continue
510 raise ValueError(f"unrecognized flag in field declaration: {flag}")
511
512 contradictions = self.add_caps & self.remove_caps
513 if contradictions:
514 raise ValueError("cannot have same capabilities as both add-cap and remove-cap: " + ", ".join(contradictions))
515
516
517class SizeInfo:
518 """Information about size along one dimension of an array or other sized
519 field type. Contains both the declared / maximum size, and the actual
520 used size (if different)."""
521
522 ARRAY_SIZE_PATTERN = re.compile(r"^([^:]+)(?:\:([^:]+))?$")
523 """Matches an array size declaration (without the brackets)
524
525 Groups:
526 - the declared / maximum size
527 - the field name for the actual size (optional)"""
528
529 declared: Location
530 """Maximum size; used in declarations"""
531
532 actual: Location
533 """Location of the field to use for the actual size."""
534
535 @classmethod
536 @cache
537 def parse(cls, size_text) -> "SizeInfo":
538 """Parse the given array size text (without brackets)"""
539 mo = cls.ARRAY_SIZE_PATTERN.fullmatch(size_text)
540 if mo is None:
541 raise ValueError(f"invalid array size declaration: [{size_text}]")
542 return cls(*mo.groups())
543
544 def __init__(self, declared: str, actual: "str | Location | None"):
545 self.declared = Location.constant(declared)
546 if actual is None:
547 self.actual = self.declared
548 elif isinstance(actual, Location):
549 self.actual = actual
550 else:
551 self.actual = Location(actual)
552
553 @property
554 def constant(self) -> bool:
555 """Whether the actual size doesn't change"""
556 # Only check whether we were initialized with no actual size;
557 # we're ignoring the possibility of a constant custom actual size
558 return self.actual is self.declared
559
560 def size_check_get(self, field_name: str, packet: str) -> str:
561 """Generate a code snippet checking whether the received size is in
562 range when receiving a packet."""
563 if self.constant:
564 return ""
565 return f"""\
566if ({self.actual @ packet} > {self.declared}) {{
567 RECEIVE_PACKET_FIELD_ERROR({field_name}, ": array truncated");
568}}
569"""
570
571 def size_check_index(self, field_name: str, packet: str) -> str:
572 """Generate a code snippet asserting that indices can be correctly
573 transmitted for array-diff."""
574 if self.constant:
575 return f"""\
576FC_STATIC_ASSERT({self.declared} <= MAX_UINT16, packet_array_too_long_{field_name});
577"""
578 else:
579 return f"""\
580fc_assert({self.actual @ packet} <= MAX_UINT16);
581"""
582
583 def index_put(self, packet: str, index: str) -> str:
584 """Generate a code snippet writing the given value to the network
585 output, encoded as the appropriate index type"""
586 if self.constant:
587 return f"""\
588#if {self.declared} <= MAX_UINT8
589e |= DIO_PUT(uint8, &dout, &field_addr, {index});
590#else
591e |= DIO_PUT(uint16, &dout, &field_addr, {index});
592#endif
593"""
594 else:
595 return f"""\
596if ({self.actual @ packet} <= MAX_UINT8) {{
597 e |= DIO_PUT(uint8, &dout, &field_addr, {index});
598}} else {{
599 e |= DIO_PUT(uint16, &dout, &field_addr, {index});
600}}
601"""
602
603 def index_get(self, packet: str, location: Location) -> str:
604 """Generate a code snippet reading the next index from the
605 network input decoded as the correct type"""
606 if self.constant:
607 return f"""\
608#if {self.declared} <= MAX_UINT8
609if (!DIO_GET(uint8, &din, &field_addr, &{location.index})) {{
610#else
611if (!DIO_GET(uint16, &din, &field_addr, &{location.index})) {{
612#endif
613 RECEIVE_PACKET_FIELD_ERROR({location.name});
614}}
615"""
616 else:
617 return f"""\
618if (({self.actual @ packet} <= MAX_UINT8)
619 ? !DIO_GET(uint8, &din, &field_addr, &{location.index})
620 : !DIO_GET(uint16, &din, &field_addr, &{location.index})) {{
621 RECEIVE_PACKET_FIELD_ERROR({location.name});
622}}
623"""
624
625 def __str__(self) -> str:
626 if self.constant:
627 return f"{self.declared}"
628 elif self.actual.name != self.actual @ None:
629 # custom location
630 return "*"
631 else:
632 return f"{self.declared}:{self.actual.name}"
633
634 def __eq__(self, other) -> bool:
635 if other is self:
636 return True
637 if not isinstance(other, __class__):
638 return NotImplemented
639 return (self.declared == other.declared
640 and self.actual == other.actual)
641
642 def __hash__(self) -> int:
643 return hash((__class__, self.declared, self.actual))
644
645
646class RawFieldType(ABC):
647 """Abstract base class (ABC) for classes representing types defined in a
648 packets definition file. These types may require the addition of a size
649 in order to be usable; see the array() method and the FieldType class."""
650
651 @abstractmethod
652 def array(self, size: SizeInfo) -> "FieldType":
653 """Add an array size to this field type, either to make a type which
654 needs a size fully usable, or to make an array type with self as
655 the element type."""
656 raise NotImplementedError
657
658 @abstractmethod
659 def __str__(self) -> str:
660 return super().__str__()
661
662 def __repr__(self) -> str:
663 return f"<{self.__class__.__name__} {self}>"
664
665
666FieldTypeConstructor: "typing.TypeAlias" = typing.Callable[[str, str], RawFieldType]
667
668class TypeRegistry:
669 """Determines what Python class to use for field types based on their
670 dataio type and public type."""
671
672 TYPE_INFO_PATTERN = re.compile(r"^([^()]*)\‍(([^()]*)\‍)$")
673 """Matches a field type.
674
675 Groups:
676 - dataio type
677 - public type (aka struct type)"""
678
679 dataio_types: "dict[str, FieldTypeConstructor]"
680 """Dictionary mapping dataio types to the constructor used for
681 field types with that dataio type.
682
683 This is the primary factor deciding what constructor to use for a
684 given field type."""
685
686 dataio_patterns: "dict[typing.Pattern[str], FieldTypeConstructor]"
687 """Dictionary mapping RegEx patterns to the constructor used for
688 field types whose dataio type matches that pattern.
689
690 Matches are cached in self.dataio_types."""
691
692 public_types: "dict[str, FieldTypeConstructor]"
693 """Like self.dataio_types, but for public types.
694
695 This is only checked if there are no matches in self.dataio_types
696 and self.dataio_patterns."""
697
698 public_patterns: "dict[typing.Pattern[str], FieldTypeConstructor]"
699 """Like self.dataio_patterns, but for public types.
700
701 Matches are cached in self.public_types."""
702
703 fallback: FieldTypeConstructor
704 """Fallback constructor used when there are no matches for either
705 dataio type or public type."""
706
707 @staticmethod
708 def _resolve(
709 key: str,
710 direct: "dict[str, FieldTypeConstructor]",
711 patterns: "dict[typing.Pattern[str], FieldTypeConstructor]",
712 fallback: FieldTypeConstructor
713 ) -> FieldTypeConstructor:
714 """Helper function. Figures out which constructor to use for a given
715 key (dataio type or public type), and caches the result."""
716 try:
717 return direct[key]
718 except KeyError:
719 pass
720
721 for pat, ctor in patterns.items():
722 mo = pat.fullmatch(key)
723 if mo is not None:
724 # cache the match
725 direct[key] = ctor
726 return ctor
727
728 # cache that there was no match
729 direct[key] = fallback
730 return fallback
731
732 def __init__(self, fallback: FieldTypeConstructor):
733 self.dataio_types = {}
734 self.dataio_patterns = {}
735 self.public_types = {}
736 self.public_patterns = {}
737 self.fallback = fallback
738
739 def parse(self, type_text: str) -> RawFieldType:
740 """Parse a single field type"""
741 mo = __class__.TYPE_INFO_PATTERN.fullmatch(type_text)
742 if mo is None:
743 raise ValueError(f"malformed type or undefined alias: {type_text!r}")
744 return self(*mo.groups())
745
746 def __call__(self, dataio_type: str, public_type: str) -> RawFieldType:
747 ctor = self._resolve(dataio_type, self.dataio_types, self.dataio_patterns, self._by_public)
748 return ctor(dataio_type, public_type)
749
750 def _by_public(self, dataio_type: str, public_type: str) -> RawFieldType:
751 ctor = self._resolve(public_type, self.public_types, self.public_patterns, self.fallback)
752 return ctor(dataio_type, public_type)
753
754
755class NeedSizeType(RawFieldType):
756 """Helper class for field types that require a size to be usable."""
757
758 dataio_type: str
759 """The dataio type passed to self.cls"""
760
761 public_type: str
762 """The public type passed to self.cls"""
763
764 cls: typing.Callable[[str, str, SizeInfo], "FieldType"]
765 """The field type constructed when adding a size to this type"""
766
767 def __init__(self, dataio_type: str, public_type: str, cls: typing.Callable[[str, str, SizeInfo], "FieldType"]):
768 self.dataio_type = dataio_type
769 self.public_type = public_type
770 self.cls = cls
771
772 def array(self, size: SizeInfo) -> "FieldType":
773 """Add an array size to make a usable type."""
774 return self.cls(self.dataio_type, self.public_type, size)
775
776 def __str__(self) -> str:
777 return f"{self.dataio_type}({self.public_type})"
778
779
780class FieldType(RawFieldType):
781 """Abstract base class (ABC) for classes representing type information
782 usable for fields of a packet"""
783
784 foldable: bool = False
785 """Whether a field of this type can be folded into the packet header"""
786
787 complex: bool = False
788 """Whether a field of this type needs special handling when initializing,
789 copying or destroying the packet struct"""
790
791 @cache
792 def array(self, size: SizeInfo) -> "FieldType":
793 """Construct a FieldType for an array with element type self and the
794 given size"""
795 if f"{size.declared}" == "*":
796 raise ValueError(f"vectors not supported for field type {self}")
797 return ArrayType(self, size)
798
799 @abstractmethod
800 def get_code_declaration(self, location: Location) -> str:
801 """Generate a code snippet declaring a field with this type in a
802 packet struct."""
803 raise NotImplementedError
804
805 @abstractmethod
806 def get_code_param(self, location: Location) -> str:
807 """Generate a code fragment declaring a parameter with this type for
808 a handle function.
809
810 See also self.get_code_handle_arg()"""
811 raise NotImplementedError
812
813 def get_code_handle_arg(self, location: Location, packet: str) -> str:
814 """Generate a code fragment passing an argument with this type to a
815 handle function.
816
817 See also self.get_code_param()"""
818 return f"{location @ packet}"
819
820 def get_code_init(self, location: Location, packet: str) -> str:
821 """Generate a code snippet initializing a field of this type in the
822 packet struct, after the struct has already been zeroed.
823
824 Subclasses must override this if self.complex is True"""
825 if self.complex:
826 raise ValueError(f"default get_code_init implementation called for field {location.name} with complex type {self!r}")
827 # no work needed
828 return ""
829
830 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
831 """Generate a code snippet deep-copying a field of this type from
832 one packet struct to another that has already been initialized.
833
834 Subclasses must override this if self.complex is True"""
835 if self.complex:
836 raise ValueError(f"default get_code_copy implementation called for field {location.name} with complex type {self!r}")
837 return f"""\
838{location @ dest} = {location @ src};
839"""
840
841 def get_code_fill(self, location: Location, packet: str) -> str:
842 """Generate a code snippet shallow-copying a value of this type from
843 dsend arguments into a packet struct."""
844 return f"""\
845{location @ packet} = {location @ None};
846"""
847
848 def get_code_free(self, location: Location, packet: str) -> str:
849 """Generate a code snippet deinitializing a field of this type in
850 the packet struct before it gets destroyed.
851
852 Subclasses must override this if self.complex is True"""
853 if self.complex:
854 raise ValueError(f"default get_code_free implementation called for field {location.name} with complex type {self!r}")
855 # no work needed
856 return ""
857
858 @abstractmethod
859 def get_code_hash(self, location: Location, packet: str) -> str:
860 """Generate a code snippet factoring a field of this type into a
861 hash computation's `result`."""
862 raise NotImplementedError
863
864 @abstractmethod
865 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
866 """Generate a code snippet comparing a field of this type between
867 the given packets and setting `differ` accordingly. The `old`
868 packet is one we know to have been initialized by our own code."""
869 raise NotImplementedError
870
871 @abstractmethod
872 def get_code_put(self, location: Location, packet: str, diff_packet: "str | None" = None) -> str:
873 """Generate a code snippet writing a field of this type to the
874 dataio stream."""
875 raise NotImplementedError
876
877 @abstractmethod
878 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
879 """Generate a code snippet reading a field of this type from the
880 dataio stream."""
881 raise NotImplementedError
882
883 def _compat_keys(self, location: Location, packet: str):
884 """Internal helper function. Yield keys to compare for
885 type compatibility. See is_type_compatible()"""
886 yield self.get_code_declaration(location)
887 yield self.get_code_param(location)
888 yield self.get_code_handle_arg(location, packet)
889 yield self.get_code_fill(location, packet)
890 yield self.complex
891 if self.complex:
892 yield self.get_code_init(location, packet)
893 yield self.get_code_free(location, packet)
894
895 def is_type_compatible(self, other: "FieldType") -> bool:
896 """Determine whether two field types can be used interchangeably as
897 part of the packet struct, i.e. differ in dataio transmission only"""
898 if other is self:
899 return True
900 loc = Location("compat_test_field_name")
901 pak = "compat_test_packet_name"
902 return all(
903 a == b
904 for a, b in zip_longest(
905 self._compat_keys(loc, pak),
906 other._compat_keys(loc, pak),
907 )
908 )
909
910
911class BasicType(FieldType):
912 """Type information for a field without any specialized treatment"""
913
914 dataio_type: str
915 """How fields of this type are transmitted over network"""
916
917 public_type: str
918 """How fields of this type are represented in C code"""
919
920 def __init__(self, dataio_type: str, public_type: str):
921 self.dataio_type = dataio_type
922 self.public_type = public_type
923
924 def get_code_declaration(self, location: Location) -> str:
925 return f"""\
926{self.public_type} {location @ None};
927"""
928
929 def get_code_param(self, location: Location) -> str:
930 return f"{self.public_type} {location @ None}"
931
932 def get_code_hash(self, location: Location, packet: str) -> str:
933 raise ValueError(f"hash not supported for type {self} in field {location.name}")
934
935 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
936 return f"""\
937differ = ({location @ old} != {location @ new});
938"""
939
940 def get_code_put(self, location: Location, packet: str, diff_packet: "str | None" = None) -> str:
941 return f"""\
942e |= DIO_PUT({self.dataio_type}, &dout, &field_addr, {location @ packet});
943"""
944
945 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
946 return f"""\
947if (!DIO_GET({self.dataio_type}, &din, &field_addr, &{location @ packet})) {{
948 RECEIVE_PACKET_FIELD_ERROR({location.name});
949}}
950"""
951
952 def __str__(self) -> str:
953 return f"{self.dataio_type}({self.public_type})"
954
955DEFAULT_REGISTRY: "typing.Final" = TypeRegistry(BasicType)
956"""The default type registry used by a PacketsDefinition when no other
957registry is given."""
958
959
960class IntType(BasicType):
961 """Type information for an integer field"""
962
963 TYPE_PATTERN = re.compile(r"^[su]int\d+$")
964 """Matches an int dataio type"""
965
966 @typing.overload
967 def __init__(self, dataio_info: str, public_type: str): ...
968 @typing.overload
969 def __init__(self, dataio_info: "re.Match[str]", public_type: str): ...
970 def __init__(self, dataio_info: "str | re.Match[str]", public_type: str):
971 if isinstance(dataio_info, str):
972 mo = self.TYPE_PATTERN.fullmatch(dataio_info)
973 if mo is None:
974 raise ValueError("not a valid int type")
975 dataio_info = mo
976 dataio_type = dataio_info.group(0)
977
978 super().__init__(dataio_type, public_type)
979
980 def get_code_hash(self, location: Location, packet: str) -> str:
981 return f"""\
982result += {location @ packet};
983"""
984
985 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
986 if self.public_type in ("int", "bool"):
987 # read directly
988 return super().get_code_get(location, packet, deep_diff)
989 # read indirectly to make sure coercions between different integer
990 # types happen correctly
991 return f"""\
992{{
993 int readin;
994
995 if (!DIO_GET({self.dataio_type}, &din, &field_addr, &readin)) {{
996 RECEIVE_PACKET_FIELD_ERROR({location.name});
997 }}
998 {location @ packet} = readin;
999}}
1000"""
1001
1002DEFAULT_REGISTRY.dataio_patterns[IntType.TYPE_PATTERN] = IntType
1003
1004
1005class BoolType(IntType):
1006 """Type information for a boolean field"""
1007
1008 TYPE_PATTERN = re.compile(r"^bool\d*$")
1009 """Matches a bool dataio type"""
1010
1011 foldable: bool = True
1012
1013 @typing.overload
1014 def __init__(self, dataio_info: str, public_type: str): ...
1015 @typing.overload
1016 def __init__(self, dataio_info: "re.Match[str]", public_type: str): ...
1017 def __init__(self, dataio_info: "str | re.Match[str]", public_type: str):
1018 if isinstance(dataio_info, str):
1019 mo = self.TYPE_PATTERN.fullmatch(dataio_info)
1020 if mo is None:
1021 raise ValueError("not a valid bool type")
1022 dataio_info = mo
1023
1024 if public_type != "bool":
1025 raise ValueError(f"bool dataio type with non-bool public type: {public_type!r}")
1026
1027 super().__init__(dataio_info, public_type)
1028
1029DEFAULT_REGISTRY.dataio_patterns[BoolType.TYPE_PATTERN] = BoolType
1030
1031
1032class FloatType(BasicType):
1033 """Type information for a float field"""
1034
1035 TYPE_PATTERN = re.compile(r"^([su]float)(\d+)?$")
1036 """Matches a float dataio type
1037
1038 Note: Will also match float types without a float factor to avoid
1039 falling back to the default; in this case, the second capturing group
1040 will not match.
1041
1042 Groups:
1043 - non-numeric dataio type
1044 - numeric float factor"""
1045
1046 float_factor: int
1047 """Granularity (fixed-point factor) used to transmit this type in an
1048 integer"""
1049
1050 @typing.overload
1051 def __init__(self, dataio_info: str, public_type: str): ...
1052 @typing.overload
1053 def __init__(self, dataio_info: "re.Match[str]", public_type: str): ...
1054 def __init__(self, dataio_info: "str | re.Match[str]", public_type: str):
1055 if isinstance(dataio_info, str):
1056 mo = self.TYPE_PATTERN.fullmatch(dataio_info)
1057 if mo is None:
1058 raise ValueError("not a valid float type")
1059 dataio_info = mo
1060 dataio_type, float_factor = dataio_info.groups()
1061 if float_factor is None:
1062 raise ValueError(f"float type without float factor: {dataio_info.string!r}")
1063
1064 if public_type != "float":
1065 raise ValueError(f"float dataio type with non-float public type: {public_type!r}")
1066
1067 super().__init__(dataio_type, public_type)
1068 self.float_factor = int(float_factor)
1069
1070 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
1071 return f"""\
1072differ = ((int) ({location @ old} * {self.float_factor}) != (int) ({location @ new} * {self.float_factor}));
1073"""
1074
1075 def get_code_put(self, location: Location, packet: str, diff_packet: "str | None" = None) -> str:
1076 return f"""\
1077e |= DIO_PUT({self.dataio_type}, &dout, &field_addr, {location @ packet}, {self.float_factor:d});
1078"""
1079
1080 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
1081 return f"""\
1082if (!DIO_GET({self.dataio_type}, &din, &field_addr, &{location @ packet}, {self.float_factor:d})) {{
1083 RECEIVE_PACKET_FIELD_ERROR({location.name});
1084}}
1085"""
1086
1087 def __str__(self) -> str:
1088 return f"{self.dataio_type}{self.float_factor:d}({self.public_type})"
1089
1090DEFAULT_REGISTRY.dataio_patterns[FloatType.TYPE_PATTERN] = FloatType
1091
1092
1093class BitvectorType(BasicType):
1094 """Type information for a bitvector field"""
1095
1096 def __init__(self, dataio_type: str, public_type: str):
1097 if dataio_type != "bitvector":
1098 raise ValueError("not a valid bitvector type")
1099
1100 super().__init__(dataio_type, public_type)
1101
1102 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
1103 return f"""\
1104differ = !BV_ARE_EQUAL({location @ old}, {location @ new});
1105"""
1106
1107 def get_code_put(self, location: Location, packet: str, diff_packet: "str | None" = None) -> str:
1108 return f"""\
1109e |= DIO_BV_PUT(&dout, &field_addr, {location @ packet});
1110"""
1111
1112 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
1113 return f"""\
1114if (!DIO_BV_GET(&din, &field_addr, {location @ packet})) {{
1115 RECEIVE_PACKET_FIELD_ERROR({location.name});
1116}}
1117"""
1118
1119DEFAULT_REGISTRY.dataio_types["bitvector"] = BitvectorType
1120
1121
1122class StructType(BasicType):
1123 """Type information for a field of some general struct type"""
1124
1125 TYPE_PATTERN = re.compile(r"^struct\s+(\w+)$")
1126 """Matches a struct public type
1127
1128 Groups:
1129 - the struct name (without the `struct ` prefix)"""
1130
1131 struct_type: str
1132 """The struct name (without the `struct ` prefix)"""
1133
1134 @typing.overload
1135 def __init__(self, dataio_type: str, public_info: str): ...
1136 @typing.overload
1137 def __init__(self, dataio_type: str, public_info: "re.Match[str]"): ...
1138 def __init__(self, dataio_type: str, public_info: "str | re.Match[str]"):
1139 if isinstance(public_info, str):
1140 mo = self.TYPE_PATTERN.fullmatch(public_info)
1141 if mo is None:
1142 raise ValueError("not a valid struct type")
1143 public_info = mo
1144 struct_type = public_info.group(1)
1145
1146 super().__init__(dataio_type, f"struct {struct_type}")
1147 self.struct_type = struct_type
1148
1149 @cache
1150 def array(self, size: SizeInfo) -> "FieldType":
1151 """Construct a FieldType for an array or vector with element type
1152 self and the given size"""
1153 if f"{size.declared}" == "*":
1154 return SpecvecType(self)
1155 return super().array(size)
1156
1157 def get_code_param(self, location: Location) -> str:
1158 if not location.depth:
1159 # top level: pass by-reference
1160 return "const " + super().get_code_param(location.replace(f"*{location}"))
1161 return super().get_code_param(location)
1162
1163 def get_code_handle_arg(self, location: Location, packet: str) -> str:
1164 # top level: pass by-reference
1165 prefix = "&" if not location.depth else ""
1166 return prefix + super().get_code_handle_arg(location, packet)
1167
1168 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
1169 return f"""\
1170differ = !are_{self.dataio_type}s_equal(&{location @ old}, &{location @ new});
1171"""
1172
1173 def get_code_put(self, location: Location, packet: str, diff_packet: "str | None" = None) -> str:
1174 return f"""\
1175e |= DIO_PUT({self.dataio_type}, &dout, &field_addr, &{location @ packet});
1176"""
1177
1178DEFAULT_REGISTRY.public_patterns[StructType.TYPE_PATTERN] = StructType
1179
1180
1181class CmParameterType(StructType):
1182 """Type information for a worklist field"""
1183
1184 def __init__(self, dataio_type: str, public_type: str):
1185 if dataio_type != "cm_parameter":
1186 raise ValueError("not a valid cm_parameter type")
1187
1188 if public_type != "struct cm_parameter":
1189 raise ValueError(f"cm_parameter dataio type with non-cm_parameter public type: {public_type!r}")
1190
1191 super().__init__(dataio_type, public_type)
1192
1193 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
1194 return f"""\
1195differ = !cm_are_parameter_equal(&{location @ old}, &{location @ new});
1196"""
1197
1198DEFAULT_REGISTRY.dataio_types["cm_parameter"] = CmParameterType
1199
1200
1201class WorklistType(StructType):
1202 """Type information for a worklist field"""
1203
1204 def __init__(self, dataio_type: str, public_type: str):
1205 if dataio_type != "worklist":
1206 raise ValueError("not a valid worklist type")
1207
1208 if public_type != "struct worklist":
1209 raise ValueError(f"worklist dataio type with non-worklist public type: {public_type!r}")
1210
1211 super().__init__(dataio_type, public_type)
1212
1213 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
1214 return f"""\
1215worklist_copy(&{location @ dest}, &{location @ src});
1216"""
1217
1218 def get_code_fill(self, location: Location, packet: str) -> str:
1219 return f"""\
1220worklist_copy(&{location @ packet}, {location @ None});
1221"""
1222
1223DEFAULT_REGISTRY.dataio_types["worklist"] = WorklistType
1224
1225
1226class SizedType(BasicType):
1227 """Abstract base class (ABC) for field types that include a size"""
1228
1229 size: SizeInfo
1230 """Size info (maximum and actual) of this type"""
1231
1232 def __init__(self, dataio_type: str, public_type: str, size: SizeInfo):
1233 super().__init__(dataio_type, public_type)
1234 self.size = size
1235
1236 def get_code_declaration(self, location: Location) -> str:
1237 return super().get_code_declaration(
1238 location.replace(f"{location}[{self.size.declared}]")
1239 )
1240
1241 def get_code_param(self, location: Location) -> str:
1242 # see ArrayType.get_code_param() for explanation
1243 if not location.depth:
1244 return "const " + super().get_code_param(location.replace(f"*{location}"))
1245 else:
1246 return super().get_code_param(location.replace(f"*const {location}"))
1247
1248 @abstractmethod
1249 def get_code_fill(self, location: Location, packet: str) -> str:
1250 return super().get_code_fill(location, packet)
1251
1252 @abstractmethod
1253 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
1254 return super().get_code_copy(location, dest, src)
1255
1256 def __str__(self) -> str:
1257 return f"{super().__str__()}[{self.size}]"
1258
1259
1260class StringType(SizedType):
1261 """Type information for a string field"""
1262
1263 def __init__(self, dataio_type: str, public_type: str, size: SizeInfo):
1264 if dataio_type not in ("string", "estring"):
1265 raise ValueError("not a valid string type")
1266
1267 if public_type != "char":
1268 raise ValueError(f"string type with illegal public type: {public_type!r}")
1269
1270 super().__init__(dataio_type, public_type, size)
1271
1272 def get_code_fill(self, location: Location, packet: str) -> str:
1273 return f"""\
1274sz_strlcpy({location @ packet}, {location @ None});
1275"""
1276
1277 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
1278 return f"""\
1279sz_strlcpy({location @ dest}, {location @ src});
1280"""
1281
1282 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
1283 return f"""\
1284differ = (strcmp({location @ old}, {location @ new}) != 0);
1285"""
1286
1287 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
1288 return f"""\
1289if (!DIO_GET({self.dataio_type}, &din, &field_addr, {location @ packet}, sizeof({location @ packet}))) {{
1290 RECEIVE_PACKET_FIELD_ERROR({location.name});
1291}}
1292"""
1293
1294
1295class MemoryType(SizedType):
1296 """Type information for a memory field"""
1297
1298 def __init__(self, dataio_type: str, public_type: str, size: SizeInfo):
1299 if dataio_type != "memory":
1300 raise ValueError("not a valid memory type")
1301
1302 super().__init__(dataio_type, public_type, size)
1303
1304 def get_code_fill(self, location: Location, packet: str) -> str:
1305 raise NotImplementedError("fill not supported for memory-type fields")
1306
1307 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
1308 return f"""\
1309memcpy({location @ dest}, {location @ src}, {self.size.actual @ src});
1310"""
1311
1312 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
1313 if self.size.constant:
1314 return f"""\
1315differ = (memcmp({location @ old}, {location @ new}, {self.size.declared}) != 0);
1316"""
1317 return f"""\
1318differ = (({self.size.actual @ old} != {self.size.actual @ new})
1319 || (memcmp({location @ old}, {location @ new}, {self.size.actual @ new}) != 0));
1320"""
1321
1322 def get_code_put(self, location: Location, packet: str, diff_packet: "str | None" = None) -> str:
1323 return f"""\
1324e |= DIO_PUT({self.dataio_type}, &dout, &field_addr, &{location @ packet}, {self.size.actual @ packet});
1325"""
1326
1327 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
1328 return f"""\
1329{self.size.size_check_get(location.name, packet)}\
1330if (!DIO_GET({self.dataio_type}, &din, &field_addr, {location @ packet}, {self.size.actual @ packet})) {{
1331 RECEIVE_PACKET_FIELD_ERROR({location.name});
1332}}
1333"""
1334
1335DEFAULT_REGISTRY.dataio_types["memory"] = partial(NeedSizeType, cls = MemoryType)
1336
1337
1338class SequenceType(FieldType):
1339 """Abstract base class (ABC) for field types representing homogeneous
1340 sequences of elements"""
1341
1342 @abstractmethod
1343 def size_at(self, location: Location) -> SizeInfo:
1344 """Return a size info object for a field of this type"""
1345 raise NotImplementedError()
1346
1347 def resize(self, location: Location, packet: str, new_size: str) -> str:
1348 """Return a code snippet updating the size of a field of this type,
1349 or the empty string if this type doesn't need to individually transmit
1350 and update its size"""
1351 return ""
1352
1353 def null_condition(self, location: Location) -> "Location | None":
1354 """Return a code condition that if TRUE means the field cannot be used
1355 normally and should be treated as empty, or None if this never happens"""
1356 return None
1357
1358 @abstractmethod
1359 def inner_cmp(self, location: Location, new: str, old: str) -> str:
1360 """Generate the loop body for get_code_cmp()
1361
1362 This is placed at the very beginning of a scope, so variable
1363 declarations are permitted without requiring an extra block."""
1364 raise NotImplementedError()
1365
1366 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
1367 size = self.size_at(location)
1368 inner_cmp = prefix(" ", self.inner_cmp(location, new, old))
1369
1370 null_con = self.null_condition(location)
1371 if null_con is None:
1372 safe_size = size
1373 else:
1374 # new packet is passed in from outside, might be null
1375 safe_size = SizeInfo(size.declared @ None, size.actual.replace(f"{null_con} ? 0 : {size.actual}"))
1376 if not safe_size.constant:
1377 # ends mid-line
1378 head = f"""\
1379differ = ({size.actual @ old} != {safe_size.actual @ new});
1380if (!differ) """
1381 else:
1382 head = f"""\
1383differ = FALSE;
1384"""
1385
1386 return f"""\
1387{head}{{
1388 int {location.index};
1389
1390 for ({location.index} = 0; {location.index} < {size.actual @ old}; {location.index}++) {{
1391{inner_cmp}\
1392 if (differ) {{
1393 break;
1394 }}
1395 }}
1396}}
1397"""
1398
1399 @abstractmethod
1400 def inner_put(self, location: Location, packet: str,
1401 diff_packet: "str | None" = None, json_step: int = 1) -> str:
1402 """Generate the main part of the loop body for get_code_put()"""
1403 raise NotImplementedError()
1404
1405 def get_code_put_full(self, location: Location, packet: str) -> str:
1406 """Generate put code without array-diff."""
1407 size = self.size_at(location)
1408 inner_put = prefix(" ", self.inner_put(location, packet))
1409
1410 send_size = bool(self.resize(location, packet, "s"))
1411 if send_size:
1412 # Note: strictly speaking, we could allow size == MAX_UINT16,
1413 # but we might want to use that in the future to signal overlong
1414 # vectors (like with jumbo packets)
1415 # Though that would also mean packets larger than 64 KiB,
1416 # which we're a long way from
1417 size_part = f"""\
1418 fc_assert({size.actual @ packet} < MAX_UINT16);
1419 e |= DIO_PUT(arraylen, &dout, &field_addr, {size.actual @ packet});
1420
1421#ifdef FREECIV_JSON_CONNECTION
1422"""
1423 else:
1424 size_part = f"""\
1425#ifdef FREECIV_JSON_CONNECTION
1426 /* Create the array. */
1427 e |= DIO_PUT(farray, &dout, &field_addr, {size.actual @ packet});
1428
1429"""
1430
1431 null_condition = self.null_condition(location)
1432 if null_condition is None:
1433 head = ""
1434 elif send_size:
1435 # ends mid-line
1436 head = f"""\
1437if ({null_condition @ packet}) {{
1438 /* Transmit null as empty */
1439 e |= DIO_PUT(arraylen, &dout, &field_addr, 0);
1440}} else """
1441 else:
1442 # ends mid-line
1443 head = f"""\
1444if ({null_condition @ packet}) {{
1445 /* Transmit null as empty */
1446#ifdef FREECIV_JSON_CONNECTION
1447 /* Create the array. */
1448 e |= DIO_PUT(farray, &dout, &field_addr, 0);
1449#endif /* FREECIV_JSON_CONNECTION */
1450}} else """
1451
1452 return f"""\
1453{head}{{
1454 int {location.index};
1455
1456{size_part}\
1457 /* Enter array. */
1458 {location.json_subloc} = plocation_elem_new(0);
1459#endif /* FREECIV_JSON_CONNECTION */
1460
1461 for ({location.index} = 0; {location.index} < {size.actual @ packet}; {location.index}++) {{
1462#ifdef FREECIV_JSON_CONNECTION
1463 /* Next array element. */
1464 {location.json_subloc}->number = {location.index};
1465#endif /* FREECIV_JSON_CONNECTION */
1466
1467{inner_put}\
1468 }}
1469
1470#ifdef FREECIV_JSON_CONNECTION
1471 /* Exit array. */
1472 FC_FREE({location.json_subloc});
1473#endif /* FREECIV_JSON_CONNECTION */
1474}}
1475"""
1476
1477 def get_code_put_diff(self, location: Location, packet: str, diff_packet: str) -> str:
1478 """Generate array-diff put code."""
1479 size = self.size_at(location)
1480
1481 null_condition = self.null_condition(location)
1482 if null_condition is not None:
1483 safe_size = SizeInfo(size.declared @ None, size.actual.replace(f"({null_condition} ? 0 : {size.actual})"))
1484 else:
1485 safe_size = size
1486
1487 if self.resize(location, packet, "s"):
1488 # starts and ends inside JSON ifdef
1489 size_head = f"""\
1490
1491 /* Create the object to hold new size and delta. */
1492 e |= DIO_PUT(object, &dout, &field_addr);
1493
1494 /* Enter object (start at size address). */
1495 {location.json_subloc} = plocation_field_new("size");
1496#endif /* FREECIV_JSON_CONNECTION */
1497
1498 /* Write the new size */
1499 e |= DIO_PUT(uint16, &dout, &field_addr, {safe_size.actual @ packet});
1500
1501#ifdef FREECIV_JSON_CONNECTION
1502 /* Delta address. */
1503 {location.json_subloc}->name = "delta";
1504"""
1505 # starts and ends inside JSON ifdef
1506 size_tail = f"""\
1507 /* Exit object. */
1508 FC_FREE({location.json_subloc});
1509"""
1510 # everything else is nested one level deeper than usual
1511 location.json_depth += 1
1512 else:
1513 size_head = size_tail = ""
1514
1515 # Note: At the moment, we're only deep-diffing our elements
1516 # if our array size is constant
1517 value_put = prefix(" ", self.inner_put(location, packet, diff_packet if size.constant else None, 2))
1518 inner_cmp = prefix(" ", self.inner_cmp(location, packet, diff_packet))
1519 index_put = prefix(" ", size.index_put(packet, location.index))
1520 index_put_sentinel = prefix(" ", safe_size.index_put(packet, safe_size.actual @ packet))
1521 size_check = prefix(" ", safe_size.size_check_index(location.name, packet))
1522
1523 if not size.constant:
1524 inner_cmp = f"""\
1525 if ({location.index} < {size.actual @ diff_packet}) {{
1526{prefix(" ", inner_cmp)}\
1527 }} else {{
1528 /* Always transmit new elements */
1529 differ = TRUE;
1530 }}
1531"""
1532
1533 return f"""\
1534{{
1535 int {location.index};
1536
1537{size_check}\
1538
1539#ifdef FREECIV_JSON_CONNECTION
1540 size_t count_{location.index} = 0;
1541{size_head}\
1542
1543 /* Create the array. */
1544 e |= DIO_PUT(farray, &dout, &field_addr, 0);
1545
1546 /* Enter array. */
1547 {location.json_subloc} = plocation_elem_new(0);
1548#endif /* FREECIV_JSON_CONNECTION */
1549
1550 for ({location.index} = 0; {location.index} < {safe_size.actual @ packet}; {location.index}++) {{
1551{inner_cmp}\
1552
1553 if (!differ) {{
1554 continue;
1555 }}
1556
1557#ifdef FREECIV_JSON_CONNECTION
1558 /* Append next diff array element. */
1559 {location.json_subloc}->number = -1;
1560
1561 /* Create the diff array element. */
1562 e |= DIO_PUT(object, &dout, &field_addr);
1563
1564 /* Enter diff array element (start at the index address). */
1565 {location.json_subloc}->number = count_{location.index}++;
1566 {location.json_subloc}->sub_location = plocation_field_new("index");
1567#endif /* FREECIV_JSON_CONNECTION */
1568
1569 /* Write the index */
1570{index_put}\
1571
1572#ifdef FREECIV_JSON_CONNECTION
1573 /* Content address. */
1574 {location.json_subloc}->sub_location->name = "data";
1575#endif /* FREECIV_JSON_CONNECTION */
1576
1577{value_put}\
1578
1579#ifdef FREECIV_JSON_CONNECTION
1580 /* Exit diff array element. */
1581 FC_FREE({location.json_subloc}->sub_location);
1582#endif /* FREECIV_JSON_CONNECTION */
1583 }}
1584
1585#ifdef FREECIV_JSON_CONNECTION
1586 /* Append diff array element. */
1587 {location.json_subloc}->number = -1;
1588
1589 /* Create the terminating diff array element. */
1590 e |= DIO_PUT(object, &dout, &field_addr);
1591
1592 /* Enter diff array element (start at the index address). */
1593 {location.json_subloc}->number = count_{location.index};
1594 {location.json_subloc}->sub_location = plocation_field_new("index");
1595#endif /* FREECIV_JSON_CONNECTION */
1596
1597 /* Write the sentinel value */
1598{index_put_sentinel}\
1599
1600#ifdef FREECIV_JSON_CONNECTION
1601 /* Exit diff array element. */
1602 FC_FREE({location.json_subloc}->sub_location);
1603 /* Exit array. */
1604 FC_FREE({location.json_subloc});
1605{size_tail}\
1606#endif /* FREECIV_JSON_CONNECTION */
1607}}
1608"""
1609
1610 def get_code_put(self, location: Location, packet: str, diff_packet: "str | None" = None) -> str:
1611 if diff_packet is not None:
1612 return self.get_code_put_diff(location, packet, diff_packet)
1613 else:
1614 return self.get_code_put_full(location, packet)
1615
1616 @abstractmethod
1617 def inner_get(self, location:Location, packet: str,
1618 deep_diff: bool = False, json_step: int = 1) -> str:
1619 """Generate the main part of the loop body for get_code_get()"""
1620 raise NotImplementedError()
1621
1622 def get_code_get_full(self, location: Location, packet: str) -> str:
1623 """Generate get code without array-diff."""
1624 size = self.size_at(location)
1625 inner_get = prefix(" ", self.inner_get(location, packet))
1626
1627 resize = self.resize(location, packet, location.index)
1628 if resize:
1629 size_part = f"""\
1630 if (!DIO_GET(arraylen, &din, &field_addr, &{location.index})) {{
1631 RECEIVE_PACKET_FIELD_ERROR({location.name});
1632 }}
1633{prefix(" ", resize)}\
1634"""
1635 else:
1636 size_part = prefix(" ", size.size_check_get(location.name, packet))
1637
1638 return f"""\
1639{{
1640 int {location.index};
1641
1642{size_part}\
1643
1644#ifdef FREECIV_JSON_CONNECTION
1645 /* Enter array. */
1646 {location.json_subloc} = plocation_elem_new(0);
1647#endif /* FREECIV_JSON_CONNECTION */
1648
1649 for ({location.index} = 0; {location.index} < {size.actual @ packet}; {location.index}++) {{
1650#ifdef FREECIV_JSON_CONNECTION
1651 /* Next array element */
1652 {location.json_subloc}->number = {location.index};
1653#endif /* FREECIV_JSON_CONNECTION */
1654
1655{inner_get}\
1656 }}
1657
1658#ifdef FREECIV_JSON_CONNECTION
1659 /* Exit array. */
1660 FC_FREE({location.json_subloc});
1661#endif /* FREECIV_JSON_CONNECTION */
1662}}
1663"""
1664
1665 def get_code_get_diff(self, location: Location, packet: str) -> str:
1666 """Generate array-diff get code."""
1667 size = self.size_at(location)
1668
1669 resize = self.resize(location, packet, location.index)
1670 if resize:
1671 # ends inside a JSON ifdef
1672 size_head = f"""\
1673#ifdef FREECIV_JSON_CONNECTION
1674/* Enter object (start at size address). */
1675{location.json_subloc} = plocation_field_new("size");
1676#endif /* FREECIV_JSON_CONNECTION */
1677
1678{{
1679 int readin;
1680
1681 if (!DIO_GET(uint16, &din, &field_addr, &readin)) {{
1682 RECEIVE_PACKET_FIELD_ERROR({location.name});
1683 }}
1684{prefix(" ", resize)}\
1685}}
1686
1687#ifdef FREECIV_JSON_CONNECTION
1688/* Delta address. */
1689{location.json_subloc}->name = "delta";
1690"""
1691 size_tail = f"""\
1692/* Exit diff object. */
1693FC_FREE({location.json_subloc});
1694"""
1695 location.json_depth += 1
1696 else:
1697 size_head = f"""\
1698{prefix(" ", size.size_check_get(location.name, packet))}\
1699#ifdef FREECIV_JSON_CONNECTION
1700"""
1701 size_tail = ""
1702
1703 # Note: At the moment, we're only deep-diffing our elements
1704 # if our array size is constant
1705 value_get = prefix(" ", self.inner_get(location, packet, size.constant, 2))
1706 index_get = prefix(" ", size.index_get(packet, location))
1707
1708 return f"""\
1709{size.size_check_index(location.name, packet)}\
1710{size_head}\
1711/* Enter array (start at initial element). */
1712{location.json_subloc} = plocation_elem_new(0);
1713/* Enter diff array element (start at the index address). */
1714{location.json_subloc}->sub_location = plocation_field_new("index");
1715#endif /* FREECIV_JSON_CONNECTION */
1716
1717while (TRUE) {{
1718 int {location.index};
1719
1720 /* Read next index */
1721{index_get}\
1722
1723 if ({location.index} == {size.actual @ packet}) {{
1724 break;
1725 }}
1726 if ({location.index} > {size.actual @ packet}) {{
1727 RECEIVE_PACKET_FIELD_ERROR({location.name},
1728 ": unexpected index %d "
1729 "> length %d in array diff",
1730 {location.index},
1731 {size.actual @ packet});
1732 }}
1733
1734#ifdef FREECIV_JSON_CONNECTION
1735 /* Content address. */
1736 {location.json_subloc}->sub_location->name = "data";
1737#endif /* FREECIV_JSON_CONNECTION */
1738
1739{value_get}\
1740
1741#ifdef FREECIV_JSON_CONNECTION
1742 /* Move to the next diff array element. */
1743 {location.json_subloc}->number++;
1744 /* Back to the index address. */
1745 {location.json_subloc}->sub_location->name = "index";
1746#endif /* FREECIV_JSON_CONNECTION */
1747}}
1748
1749#ifdef FREECIV_JSON_CONNECTION
1750/* Exit diff array element. */
1751FC_FREE({location.json_subloc}->sub_location);
1752/* Exit array. */
1753FC_FREE({location.json_subloc});
1754{size_tail}\
1755#endif /* FREECIV_JSON_CONNECTION */
1756"""
1757
1758 def get_code_get(self, location: Location, packet: str, deep_diff: bool = False) -> str:
1759 if deep_diff:
1760 return self.get_code_get_diff(location, packet)
1761 else:
1762 return self.get_code_get_full(location, packet)
1763
1764
1765class ArrayType(SequenceType):
1766 """Type information for an array field. Consists of size information and
1767 another FieldType for the array's elements, which may also be an
1768 ArrayType (for multi-dimensional arrays) or another SequenceType."""
1769
1770 elem: FieldType
1771 """The type of the array elements"""
1772
1773 size: SizeInfo
1774 """The length (maximum and actual) of the array"""
1775
1776 def __init__(self, elem: FieldType, size: SizeInfo):
1777 self.elem = elem
1778 self.size = size
1779
1780 @property
1781 def complex(self) -> bool:
1782 return self.elem.complex
1783
1784 def get_code_declaration(self, location: Location) -> str:
1785 return self.elem.get_code_declaration(
1786 location.deeper(f"{location}[{self.size.declared}]")
1787 )
1788
1789 def get_code_param(self, location: Location) -> str:
1790 # When changing this, update SizedType.get_code_param() accordingly
1791
1792 # Note: If we're fine with writing `foo_t const *fieldname`,
1793 # we'd only need one case, .deeper(f"const *{location}")
1794 if not location.depth:
1795 # foo_t fieldname ~> const foo_t *fieldname
1796 return "const " + self.elem.get_code_param(location.deeper(f"*{location}"))
1797 else:
1798 # const foo_t *fieldname ~> const foo_t *const *fieldname
1799 # the final * is already part of the location
1800 return self.elem.get_code_param(location.deeper(f"*const {location}"))
1801
1802 def get_code_init(self, location: Location, packet: str) -> str:
1803 if not self.complex:
1804 return super().get_code_init(location, packet)
1805 inner_init = prefix(" ", self.elem.get_code_init(location.sub, packet))
1806 # Note: we're initializing and destroying *all* elements of the array,
1807 # not just those up to the actual size; otherwise we'd have to
1808 # dynamically initialize and destroy elements as the actual size changes
1809 return f"""\
1810{{
1811 int {location.index};
1812
1813 for ({location.index} = 0; {location.index} < {self.size.declared}; {location.index}++) {{
1814{inner_init}\
1815 }}
1816}}
1817"""
1818
1819 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
1820 # can't use direct assignment to bit-copy a raw array,
1821 # even if our type is not complex
1822 inner_copy = prefix(" ", self.elem.get_code_copy(location.sub, dest, src))
1823 return f"""\
1824{{
1825 int {location.index};
1826
1827 for ({location.index} = 0; {location.index} < {self.size.actual @ src}; {location.index}++) {{
1828{inner_copy}\
1829 }}
1830}}
1831"""
1832
1833 def get_code_fill(self, location: Location, packet: str) -> str:
1834 inner_fill = prefix(" ", self.elem.get_code_fill(location.sub, packet))
1835 return f"""\
1836{{
1837 int {location.index};
1838
1839 for ({location.index} = 0; {location.index} < {self.size.actual @ packet}; {location.index}++) {{
1840{inner_fill}\
1841 }}
1842}}
1843"""
1844
1845 def get_code_free(self, location: Location, packet: str) -> str:
1846 if not self.complex:
1847 return super().get_code_free(location, packet)
1848 inner_free = prefix(" ", self.elem.get_code_free(location.sub, packet))
1849 # Note: we're initializing and destroying *all* elements of the array,
1850 # not just those up to the actual size; otherwise we'd have to
1851 # dynamically initialize and destroy elements as the actual size changes
1852 return f"""\
1853{{
1854 int {location.index};
1855
1856 for ({location.index} = 0; {location.index} < {self.size.declared}; {location.index}++) {{
1857{inner_free}\
1858 }}
1859}}
1860"""
1861
1862 def get_code_hash(self, location: Location) -> str:
1863 raise ValueError(f"hash not supported for array type {self} in field {location.name}")
1864
1865 def size_at(self, location: Location) -> SizeInfo:
1866 return self.size
1867
1868 def inner_cmp(self, location: Location, new: str, old: str) -> str:
1869 return self.elem.get_code_cmp(location.sub, new, old)
1870
1871 def inner_put(self, location: Location, packet: str,
1872 diff_packet: "str | None" = None, json_step: int = 1) -> str:
1873 return self.elem.get_code_put(location.sub_full(json_step), packet, diff_packet)
1874
1875 def inner_get(self, location:Location, packet: str,
1876 deep_diff: bool = False, json_step: int = 1) -> str:
1877 return self.elem.get_code_get(location.sub_full(json_step), packet, deep_diff)
1878
1879 def __str__(self) -> str:
1880 return f"{self.elem}[{self.size}]"
1881
1882
1883# order matters: we want SequenceType methods to override StructType methods
1884class SpecvecType(SequenceType, StructType):
1885 """Type information for a specialized vector field"""
1886
1887 elem: FieldType
1888 """The type of the vector elements"""
1889
1890 complex: bool = True
1891
1892 def __init__(self, elem: StructType):
1893 if elem.complex:
1894 raise ValueError("vectors with complex fields are not supported")
1895 super().__init__(elem.dataio_type, f"struct {elem.struct_type}_vector")
1896 self.elem = elem
1897
1898 def get_code_init(self, location: Location, packet: str) -> str:
1899 return f"""\
1900{self.struct_type}_init(&{location @ packet});
1901"""
1902
1903 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
1904 return f"""\
1905{self.struct_type}_copy(&{location @ dest}, &{location @ src});
1906"""
1907
1908 def get_code_free(self, location: Location, packet: str) -> str:
1909 return f"""\
1910{self.struct_type}_free(&{location @ packet});
1911"""
1912
1913 def _sub(self, location: Location, json_step: int = 1) -> Location:
1914 ## return location.deeper(f"(*{self.struct_type}_get(&{location}, {location.index}))")
1915 return location.deeper(f"{location}.p[{location.index}]", json_step)
1916
1917 @cache
1918 def size_at(self, location: Location) -> SizeInfo:
1919 return SizeInfo("GENERATE_PACKETS_ERROR", location.replace(f"{self.struct_type}_size(&{location})"))
1920
1921 def resize(self, location: Location, packet: str, new_size: str) -> str:
1922 # if elem is complex, adjusting vector size takes extra work
1923 # not currently supported; enforced in self.__init__()
1924 assert not self.elem.complex
1925
1926 return f"""\
1927{self.struct_type}_reserve(&{location @ packet}, {new_size});
1928"""
1929
1930 def inner_cmp(self, location: Location, new: str, old: str) -> str:
1931 return self.elem.get_code_cmp(self._sub(location), new, old)
1932
1933 def inner_put(self, location: Location, packet: str,
1934 diff_packet: "str | None" = None, json_step: int = 1) -> str:
1935 return self.elem.get_code_put(self._sub(location, json_step), packet, diff_packet)
1936
1937 def inner_get(self, location:Location, packet: str,
1938 deep_diff: bool = False, json_step: int = 1) -> str:
1939 return self.elem.get_code_get(self._sub(location, json_step), packet, deep_diff)
1940
1941 def __str__(self) -> str:
1942 return f"{self.elem}[*]"
1943
1944
1945class StrvecType(SequenceType):
1946 """Type information for a string vector field"""
1947
1948 dataio_type: str
1949 """How fields of this type are transmitted over network"""
1950
1951 public_type: str
1952 """How fields of this type are represented in C code"""
1953
1954 complex: bool = True
1955
1956 def __init__(self, dataio_type: str, public_type: str):
1957 if dataio_type not in ("string", "estring"):
1958 raise ValueError("not a valid strvec type")
1959
1960 if public_type != "struct strvec":
1961 raise ValueError(f"strvec type with illegal public type: {public_type!r}")
1962
1963 self.dataio_type = dataio_type
1964 self.public_type = public_type
1965
1966 def get_code_declaration(self, location: Location) -> str:
1967 return f"""\
1968{self.public_type} *{location @ None};
1969"""
1970
1971 def get_code_param(self, location: Location) -> str:
1972 if not location.depth:
1973 return f"const {self.public_type} *{location @ None}"
1974 else:
1975 # const struct strvec *const *fieldname
1976 # the final * is already part of the location
1977 # initial const gets added from outside
1978 return f"{self.public_type} *const {location @ None}"
1979
1980 def get_code_init(self, location: Location, packet: str) -> str:
1981 # we're always allocating our vectors, even if they're empty
1982 return f"""\
1983{location @ packet} = strvec_new();
1984"""
1985
1986 def get_code_fill(self, location: Location, packet: str) -> str:
1987 """Generate a code snippet shallow-copying a value of this type from
1988 dsend arguments into a packet struct."""
1989 # safety: the packet's contents will not be modified without cloning
1990 # it first, so discarding 'const' qualifier here is safe
1991 return f"""\
1992{location @ packet} = (struct strvec *) {location @ None};
1993"""
1994
1995 def get_code_copy(self, location: Location, dest: str, src: str) -> str:
1996 # dest is initialized by us ~> not null
1997 # src might be a packet passed in from outside ~> could be null
1998 return f"""\
1999if ({location @ src}) {{
2000 strvec_copy({location @ dest}, {location @ src});
2001}} else {{
2002 strvec_clear({location @ dest});
2003}}
2004"""
2005
2006 def get_code_free(self, location: Location, packet: str) -> str:
2007 return f"""\
2008if ({location @ packet}) {{
2009 strvec_destroy({location @ packet});
2010 {location @ packet} = nullptr;
2011}}
2012"""
2013
2014 def get_code_hash(self, location: Location) -> str:
2015 raise ValueError(f"hash not supported for strvec type {self} in field {location.name}")
2016
2017 def get_code_cmp(self, location: Location, new: str, old: str) -> str:
2018 # "new" packet passed in from outside might have null vector
2019 return f"""\
2020if ({location @ new}) {{
2021 differ = !are_strvecs_equal({location @ old}, {location @ new});
2022}} else {{
2023 differ = (strvec_size({location @ old}) > 0);
2024}}
2025"""
2026
2027 @staticmethod
2028 @cache
2029 def size_at(location: Location) -> SizeInfo:
2030 return SizeInfo("GENERATE_PACKETS_ERROR", location.replace(f"strvec_size({location})"))
2031
2032 def resize(self, location: Location, packet: str, new_size: str) -> str:
2033 return f"""\
2034strvec_reserve({location @ packet}, {new_size});
2035"""
2036
2037 def null_condition(self, location: Location) -> Location:
2038 return location.replace(f"!{location}")
2039
2040 def inner_cmp(self, location: Location, new: str, old: str) -> str:
2041 return f"""\
2042const char *pstr = strvec_get({location @ new}, {location.index});
2043const char *pstr_old = strvec_get({location @ old}, {location.index});
2044
2045differ = (strcmp(pstr_old ? pstr_old : "", pstr ? pstr : "") != 0);
2046"""
2047
2048 def inner_put(self, location: Location, packet: str,
2049 diff_packet: "str | None" = None, json_step: int = 1) -> str:
2050 return f"""\
2051{{
2052 const char *pstr = strvec_get({location @ packet}, {location.index});
2053
2054 e |= DIO_PUT({self.dataio_type}, &dout, &field_addr, pstr ? pstr : "");
2055}}
2056"""
2057
2058 def inner_get(self, location:Location, packet: str,
2059 deep_diff: bool = False, json_step: int = 1) -> str:
2060 return f"""\
2061{{
2062 char readin[MAX_LEN_PACKET];
2063
2064 if (!DIO_GET({self.dataio_type}, &din, &field_addr, readin, sizeof(readin))
2065 || !strvec_set({location @ packet}, {location.index}, readin)) {{
2066 RECEIVE_PACKET_FIELD_ERROR({location.name});
2067 }}
2068}}
2069"""
2070
2071 def __str__(self) -> str:
2072 return f"{self.dataio_type}({self.public_type})"
2073
2074
2075def string_type_ctor(dataio_type: str, public_type: str) -> RawFieldType:
2076 """Field type constructor for both strings and string vectors"""
2077 if dataio_type not in ("string", "estring"):
2078 raise ValueError(f"not a valid string type: {dataio_type}")
2079
2080 if public_type == "char":
2081 return NeedSizeType(dataio_type, public_type, cls = StringType)
2082 elif public_type == "struct strvec":
2083 return StrvecType(dataio_type, public_type)
2084 else:
2085 raise ValueError(f"public type {public_type} not legal for dataio type {dataio_type}")
2086
2087DEFAULT_REGISTRY.dataio_types["string"] = DEFAULT_REGISTRY.dataio_types["estring"] = string_type_ctor
2088
2089
2090class Field:
2091 """A single field of a packet. Consists of a name, type information
2092 (including array sizes) and flags."""
2093
2094 FIELDS_LINE_PATTERN = re.compile(r"""
2095 ^\s*
2096 ( # field type; see also TypeRegistry.TYPE_INFO_PATTERN
2097 \w+ # alias or dataio type
2098 (?: # optionally, public type (if this is not an alias)
2099 \‍(
2100 [^()]*
2101 \‍)
2102 )?
2103 )
2104 \s+
2105 ( # zero or more field declarations
2106 [^;()]*?
2107 )
2108 \s*;\s*
2109 (.*?) # flags
2110 \s*$
2111 """, re.VERBOSE)
2112 """Matches an entire field definition line.
2113
2114 Groups:
2115 - type
2116 - field names and array sizes
2117 - flags"""
2118
2119 FIELD_ARRAY_PATTERN = re.compile(r"^(.+)\[([^][]+)\]$")
2120 """Matches a field definition with one or more array sizes
2121
2122 Groups:
2123 - everything except the final array size
2124 - the final array size"""
2125
2126 FORBIDDEN_NAMES = {"pid", "fields"}
2127 """Field names that are not allowed because they would conflict
2128 with the special fields used by the JSON protocol"""
2129
2130 cfg: ScriptConfig
2131 """Configuration used when generating code for this field"""
2132
2133 name: str
2134 """This field's name (identifier)"""
2135
2136 type_info: FieldType
2137 """This field's type information; see FieldType"""
2138
2139 flags: FieldFlags
2140 """This field's flags; see FieldFlags"""
2141
2142 @classmethod
2143 def parse(cls, cfg: ScriptConfig, line: str, resolve_type: typing.Callable[[str], RawFieldType]) -> "typing.Iterable[Field]":
2144 """Parse a single line defining one or more fields"""
2145 mo = cls.FIELDS_LINE_PATTERN.fullmatch(line)
2146 if mo is None:
2147 raise ValueError(f"invalid field definition: {line!r}")
2148 type_text, fields, flags = (i.strip() for i in mo.groups(""))
2149
2150 type_info = resolve_type(type_text)
2151 flag_info = FieldFlags.parse(flags)
2152
2153 # analyze fields
2154 for field_text in fields.split(","):
2155 field_text = field_text.strip()
2156 field_type = type_info
2157
2158 mo = cls.FIELD_ARRAY_PATTERN.fullmatch(field_text)
2159 while mo is not None:
2160 field_text = mo.group(1)
2161 field_type = field_type.array(SizeInfo.parse(mo.group(2)))
2162 mo = cls.FIELD_ARRAY_PATTERN.fullmatch(field_text)
2163
2164 if not isinstance(field_type, FieldType):
2165 raise ValueError(f"need an array size to use type {field_type}")
2166
2167 if field_text in cls.FORBIDDEN_NAMES:
2168 raise ValueError(f"illegal field name: {field_text}")
2169
2170 yield cls(cfg, field_text, field_type, flag_info)
2171
2172 def __init__(self, cfg: ScriptConfig, name: str, type_info: FieldType, flags: FieldFlags):
2173 self.cfg = cfg
2174 self.name = name
2175 self.type_info = type_info
2176 self.flags = flags
2177
2178 @property
2179 def is_key(self) -> bool:
2180 """Whether this is a key field"""
2181 return self.flags.is_key
2182
2183 @property
2184 def diff(self) -> bool:
2185 """Whether this field uses deep diff / array-diff when transmitted
2186 as part of a delta packet"""
2187 return self.flags.diff
2188
2189 @property
2190 def all_caps(self) -> "typing.AbstractSet[str]":
2191 """Set of all capabilities affecting this field"""
2192 return self.flags.add_caps | self.flags.remove_caps
2193
2194 @property
2195 def complex(self) -> bool:
2196 """Whether this field's type requires special handling;
2197 see FieldType.complex"""
2198 return self.type_info.complex
2199
2200 def is_compatible(self, other: "Field") -> bool:
2201 """Whether two field objects are variants of the same field, i.e.
2202 type-compatible in the packet struct and mutually exclusive based
2203 on their required capabilities.
2204
2205 Note that this function does not test field name."""
2206 return bool(
2207 (
2208 (self.flags.add_caps & other.flags.remove_caps)
2209 or
2210 (self.flags.remove_caps & other.flags.add_caps)
2211 )
2212 and
2213 self.type_info.is_type_compatible(other.type_info)
2214 )
2215
2216 def present_with_caps(self, caps: typing.Container[str]) -> bool:
2217 """Determine whether this field should be part of a variant with the
2218 given capabilities"""
2219 return (
2220 all(cap in caps for cap in self.flags.add_caps)
2221 ) and (
2222 all(cap not in caps for cap in self.flags.remove_caps)
2223 )
2224
2225 def get_declaration(self) -> str:
2226 """Generate the way this field is declared in the packet struct"""
2227 return self.type_info.get_code_declaration(Location(self.name))
2228
2229 def get_param(self) -> str:
2230 """Generate the way this field is declared as a parameter of a
2231 handle or dsend function.
2232
2233 See also self.get_handle_arg()"""
2234 return self.type_info.get_code_param(Location(self.name))
2235
2236 def get_handle_arg(self, packet: str) -> str:
2237 """Generate the way this field is passed as an argument to a handle
2238 function.
2239
2240 See also self.get_handle_param()"""
2241 return self.type_info.get_code_handle_arg(Location(self.name), packet)
2242
2243 def get_init(self) -> str:
2244 """Generate code initializing this field in the packet struct, after
2245 the struct has already been zeroed."""
2246 return self.type_info.get_code_init(Location(self.name), "packet")
2247
2248 def get_copy(self, dest: str, src: str) -> str:
2249 """Generate code deep-copying this field from *src to *dest."""
2250 return self.type_info.get_code_copy(Location(self.name), dest, src)
2251
2252 def get_fill(self) -> str:
2253 """Generate code shallow-copying this field from the dsend arguments
2254 into the packet struct."""
2255 return self.type_info.get_code_fill(Location(self.name), "real_packet")
2256
2257 def get_free(self) -> str:
2258 """Generate code deinitializing this field in the packet struct
2259 before destroying the packet."""
2260 return self.type_info.get_code_free(Location(self.name), "packet")
2261
2262 def get_hash(self) -> str:
2263 """Generate code factoring this field into a hash computation."""
2264 assert self.is_key
2265 return self.type_info.get_code_hash(Location(self.name), "key")
2266
2267 @property
2268 def folded_into_head(self) -> bool:
2269 """Whether this field is folded into the packet header.
2270
2271 If enabled, lone booleans (which only carry one bit of information)
2272 get directly written into the `fields` bitvector, since they don't
2273 take any more space than the usual "content-differs" bit would.
2274
2275 See also get_cmp_wrapper()"""
2276 return (
2277 self.cfg.fold_bool
2278 and self.type_info.foldable
2279 )
2280
2281 def get_cmp_wrapper(self, i: int, pack: "Variant") -> str:
2282 """Generate code setting this field's bit in the `fields` bitvector.
2283
2284 This bit marks whether the field changed and is being transmitted,
2285 except for (non-array) boolean fields folded into the header;
2286 see self.folded_into_head for more details.
2287
2288 See also self.get_cmp()"""
2289 if self.folded_into_head:
2290 info_part = f"""\
2291{self.get_cmp()}\
2292if (differ) {{
2293 different++;
2294}}
2295""" if pack.is_info != "no" else ""
2296
2297 return f"""\
2298{info_part}\
2299/* folded into head */
2300if (real_packet->{self.name}) {{
2301 BV_SET(fields, {i:d});
2302}}
2303"""
2304 else:
2305 info_part = f"""\
2306 different++;
2307""" if pack.is_info != "no" else ""
2308
2309 return f"""\
2310{self.get_cmp()}\
2311if (differ) {{
2312{info_part}\
2313 BV_SET(fields, {i:d});
2314}}
2315"""
2316
2317 def get_cmp(self) -> str:
2318 """Generate code checking whether this field changed.
2319
2320 This code is primarily used by self.get_cmp_wrapper()"""
2321 return self.type_info.get_code_cmp(Location(self.name), "real_packet", "old")
2322
2323 def get_put_wrapper(self, packet: "Variant", index: int, deltafragment: bool) -> str:
2324 """Generate code conditionally putting this field iff its bit in the
2325 `fields` bitvector is set.
2326
2327 Does nothing for boolean fields folded into the packet header.
2328
2329 See also self.get_put()"""
2330 if self.folded_into_head:
2331 return f"""\
2332/* field {index:d} is folded into the header */
2333"""
2334
2335 log = f"""\
2336 {packet.log_macro}(" field '{self.name}' has changed");
2337
2338""" if packet.gen_log else ""
2339
2340 stats = f"""\
2341 stats_{packet.name}_counters[{index:d}]++;
2342
2343""" if packet.gen_stats else ""
2344
2345 put = prefix(" ", self.get_put(deltafragment))
2346
2347 return f"""\
2348if (BV_ISSET(fields, {index:d})) {{
2349{log}\
2350{stats}\
2351{put}\
2352}}
2353"""
2354
2355 def get_put(self, deltafragment: bool) -> str:
2356 """Generate the code putting this field, i.e. writing it to the
2357 dataio stream.
2358
2359 This does not include delta-related code checking whether to
2360 transmit the field in the first place; see self.get_put_wrapper()"""
2361 return f"""\
2362#ifdef FREECIV_JSON_CONNECTION
2363field_addr.name = "{self.name}";
2364#endif /* FREECIV_JSON_CONNECTION */
2365e = 0;
2366
2367{self.get_put_real(deltafragment)}\
2368
2369if (e) {{
2370 log_packet_detailed("'{self.name}' field error detected");
2371}}
2372"""
2373
2374 def get_put_real(self, deltafragment: bool) -> str:
2375 """Generate the bare core of this field's put code. This code is not
2376 yet wrapped for full delta and JSON protocol support.
2377
2378 See self.get_put() for more info"""
2379 return self.type_info.get_code_put(Location(self.name), "real_packet", "old" if deltafragment and self.diff else None)
2380
2381 def get_get_wrapper(self, packet: "Variant", i: int, deltafragment: bool) -> str:
2382 """Generate code conditionally getting this field iff its bit in the
2383 `fields` bitvector is set.
2384
2385 For boolean fields folded into the packet header, instead reads the
2386 field from the bitvector.
2387
2388 See also self.get_get()"""
2389 if self.folded_into_head:
2390 return f"""\
2391real_packet->{self.name} = BV_ISSET(fields, {i:d});
2392"""
2393
2394 get = prefix(" ", self.get_get(deltafragment))
2395 log = f"""\
2396 {packet.log_macro}(" got field '{self.name}'");
2397
2398""" if packet.gen_log else ""
2399
2400 return f"""\
2401if (BV_ISSET(fields, {i:d})) {{
2402{log}\
2403{get}\
2404}}
2405"""
2406
2407 def get_get(self, deltafragment: bool) -> str:
2408 """Generate the code getting this field, i.e. reading it from the
2409 dataio stream.
2410
2411 This does not include delta-related code checking if the field
2412 was transmitted in the first place; see self.get_get_wrapper()"""
2413 return f"""\
2414#ifdef FREECIV_JSON_CONNECTION
2415field_addr.name = "{self.name}";
2416#endif /* FREECIV_JSON_CONNECTION */
2417
2418{self.get_get_real(deltafragment)}\
2419"""
2420
2421 def get_get_real(self, deltafragment: bool) -> str:
2422 """Generate the bare core of this field's get code. This code is not
2423 yet wrapped for full delta and JSON protocol support.
2424
2425 See self.get_get() for more info"""
2426 return self.type_info.get_code_get(Location(self.name), "real_packet", deltafragment and self.diff)
2427
2428
2429class Variant:
2430 """Represents one variant of a packet. Packets with add-cap or
2431 remove-cap fields have different variants for different combinations of
2432 the relevant optional capabilities."""
2433
2434 packet: "Packet"
2435 """The packet this is a variant of"""
2436
2437 var_number: int
2438 """The numeric variant ID (not packet ID) of this variant"""
2439
2440 name: str
2441 """The full name of this variant"""
2442
2443 poscaps: typing.AbstractSet[str]
2444 """The set of optional capabilities that must be present to use this
2445 variant"""
2446
2447 negcaps: typing.AbstractSet[str]
2448 """The set of optional capabilities that must *not* be present to
2449 use this variant"""
2450
2451 fields: typing.Sequence[Field]
2452 """All fields that are transmitted when using this variant"""
2453
2454 key_fields: typing.Sequence[Field]
2455 """The key fields that are used for this variant"""
2456
2457 other_fields: typing.Sequence[Field]
2458 """The non-key fields that are transmitted when using this variant"""
2459
2460 keys_format: str
2461 """The printf format string for this variant's key fields in
2462 generated log calls
2463
2464 See also self.keys_arg"""
2465
2466 keys_arg: str
2467 """The arguments passed when formatting this variant's key fields in
2468 generated log calls
2469
2470 See also self.keys_format"""
2471
2472 def __init__(self, poscaps: typing.Iterable[str], negcaps: typing.Iterable[str],
2473 packet: "Packet", var_number: int):
2474 self.packet = packet
2475 self.var_number = var_number
2476 self.name = f"{packet.name}_{var_number:d}"
2477
2478 self.poscaps = set(poscaps)
2479 self.negcaps = set(negcaps)
2480 self.fields = [
2481 field
2482 for field in packet.all_fields
2483 if field.present_with_caps(self.poscaps)
2484 ]
2485 self.key_fields = [field for field in self.fields if field.is_key]
2486 self.other_fields = [field for field in self.fields if not field.is_key]
2487 # FIXME: Doesn't work with non-int key fields
2488 self.keys_format = ", ".join(["%d"] * len(self.key_fields))
2489 self.keys_arg = ", ".join("real_packet->" + field.name for field in self.key_fields)
2490 if self.keys_arg:
2491 self.keys_arg = ",\n " + self.keys_arg
2492
2493 if not self.fields and packet.fields:
2494 raise ValueError(f"empty variant for nonempty {self.packet_name} with capabilities {self.poscaps}")
2495
2496 @property
2497 def cfg(self) -> ScriptConfig:
2498 """Configuration used when generating code for this packet
2499 variant
2500
2501 See self.packet and Packet.cfg"""
2502 return self.packet.cfg
2503
2504 @property
2505 def gen_stats(self) -> bool:
2506 """Whether to generate delta stats code for this packet variant
2507
2508 See self.cfg and ScriptConfig.gen_stats"""
2509 return self.cfg.gen_stats
2510
2511 @property
2512 def log_macro(self) -> "str | None":
2513 """The log macro used to generate log calls for this packet variant,
2514 or None if no log calls should be generated
2515
2516 See self.cfg and ScriptConfig.log_macro"""
2517 return self.cfg.log_macro
2518
2519 @property
2520 def gen_log(self) -> bool:
2521 """Whether to generate log calls for this packet variant
2522
2523 See self.log_macro"""
2524 return self.log_macro is not None
2525
2526 @property
2527 def packet_name(self) -> str:
2528 """Name of the packet this is a variant of
2529
2530 See Packet.name"""
2531 return self.packet.name
2532
2533 @property
2534 def type(self) -> str:
2535 """Type (enum constant) of the packet this is a variant of
2536
2537 See Packet.type"""
2538 return self.packet.type
2539
2540 @property
2541 def no_packet(self) -> bool:
2542 """Whether the send function should not take/need a packet struct
2543
2544 See Packet.no_packet"""
2545 return self.packet.no_packet
2546
2547 @property
2548 def delta(self) -> bool:
2549 """Whether this packet can use delta optimization
2550
2551 See Packet.delta"""
2552 return self.packet.delta
2553
2554 @property
2555 def want_force(self):
2556 """Whether send function takes a force_to_send boolean
2557
2558 See Packet.want_force"""
2559 return self.packet.want_force
2560
2561 @property
2562 def is_info(self) -> str:
2563 """Whether this is an info or game-info packet"""
2564 return self.packet.is_info
2565
2566 @property
2567 def reset_packets(self) -> "list[str]":
2568 """List of packets to reset when sending or receiving this packet
2569
2570 See Packet.reset_packets"""
2571 return self.packet.reset_packets
2572
2573 @property
2574 def complex(self) -> bool:
2575 """Whether this packet's struct requires special handling for
2576 initialization, copying, and destruction.
2577
2578 Note that this is still True even if the complex-typed fields
2579 of the packet are excluded from this Variant."""
2580 return self.packet.complex
2581
2582 @property
2583 def differ_used(self) -> bool:
2584 """Whether the send function needs a `differ` boolean.
2585
2586 See get_send()"""
2587 return (
2588 self.delta
2589 and (
2590 self.is_info != "no"
2591 or any(
2592 not field.folded_into_head
2593 for field in self.other_fields
2594 )
2595 )
2596 )
2597
2598 @property
2599 def condition(self) -> str:
2600 """The condition determining whether this variant should be used,
2601 based on capabilities.
2602
2603 See get_packet_handlers_fill_capability()"""
2604 if self.poscaps or self.negcaps:
2605 return " && ".join(chain(
2606 (f"has_capability(\"{cap}\", capability)" for cap in sorted(self.poscaps)),
2607 (f"!has_capability(\"{cap}\", capability)" for cap in sorted(self.negcaps)),
2608 ))
2609 else:
2610 return "TRUE"
2611
2612 @property
2613 def bits(self) -> int:
2614 """The length of the bitvector for this variant."""
2615 return len(self.other_fields)
2616
2617 @property
2618 def receive_prototype(self) -> str:
2619 """The prototype of this variant's receive function"""
2620 return f"static struct {self.packet_name} *receive_{self.name}(struct connection *pc)"
2621
2622 @property
2623 def send_prototype(self) -> str:
2624 """The prototype of this variant's send function"""
2625 return f"static int send_{self.name}(struct connection *pc{self.packet.send_params})"
2626
2627 @property
2628 def fill_send_handler(self) -> str:
2629 """Code to set the send handler for this variant
2630
2631 See get_packet_handlers_fill_initial and
2632 get_packet_handlers_fill_capability"""
2633 if self.no_packet:
2634 return f"""\
2635phandlers->send[{self.type}].no_packet = (int(*)(struct connection *)) send_{self.name};
2636"""
2637 elif self.want_force:
2638 return f"""\
2639phandlers->send[{self.type}].force_to_send = (int(*)(struct connection *, const void *, bool)) send_{self.name};
2640"""
2641 else:
2642 return f"""\
2643phandlers->send[{self.type}].packet = (int(*)(struct connection *, const void *)) send_{self.name};
2644"""
2645
2646 @property
2647 def fill_receive_handler(self) -> str:
2648 """Code to set the receive handler for this variant
2649
2650 See get_packet_handlers_fill_initial and
2651 get_packet_handlers_fill_capability"""
2652 return f"""\
2653phandlers->receive[{self.type}] = (void *(*)(struct connection *)) receive_{self.name};
2654"""
2655
2656 def get_copy(self, dest: str, src: str) -> str:
2657 """Generate code deep-copying the fields relevant to this variant
2658 from *src to *dest"""
2659 if not self.complex:
2660 return f"""\
2661*{dest} = *{src};
2662"""
2663 return "".join(
2664 field.get_copy(dest, src)
2665 for field in self.fields
2666 )
2667
2668 def get_stats(self) -> str:
2669 """Generate the declaration of the delta stats counters associated
2670 with this packet variant"""
2671 names = ", ".join(
2672 f"\"{field.name}\""
2673 for field in self.other_fields
2674 )
2675
2676 return f"""\
2677static int stats_{self.name}_sent;
2678static int stats_{self.name}_discarded;
2679static int stats_{self.name}_counters[{self.bits:d}];
2680static const char *stats_{self.name}_names[] = {{{names}}};
2681
2682"""
2683
2684 def get_bitvector(self) -> str:
2685 """Generate the declaration of the fields bitvector type for this
2686 packet variant"""
2687 return f"""\
2688BV_DEFINE({self.name}_fields, {self.bits});
2689"""
2690
2691 def get_report_part(self) -> str:
2692 """Generate the part of the delta_stats_report() function specific
2693 to this packet variant"""
2694 return f"""\
2695
2696if (stats_{self.name}_sent > 0
2697 && stats_{self.name}_discarded != stats_{self.name}_sent) {{
2698 log_test("{self.name} %d out of %d got discarded",
2699 stats_{self.name}_discarded, stats_{self.name}_sent);
2700 for (i = 0; i < {self.bits}; i++) {{
2701 if (stats_{self.name}_counters[i] > 0) {{
2702 log_test(" %4d / %4d: %2d = %s",
2703 stats_{self.name}_counters[i],
2704 (stats_{self.name}_sent - stats_{self.name}_discarded),
2705 i, stats_{self.name}_names[i]);
2706 }}
2707 }}
2708}}
2709"""
2710
2711 def get_reset_part(self) -> str:
2712 """Generate the part of the delta_stats_reset() function specific
2713 to this packet variant"""
2714 return f"""\
2715stats_{self.name}_sent = 0;
2716stats_{self.name}_discarded = 0;
2717memset(stats_{self.name}_counters, 0,
2718 sizeof(stats_{self.name}_counters));
2719"""
2720
2721 def get_hash(self) -> str:
2722 """Generate the key hash function for this variant"""
2723 if not self.key_fields:
2724 return f"""\
2725#define hash_{self.name} hash_const
2726"""
2727
2728 intro = f"""\
2729static genhash_val_t hash_{self.name}(const void *vkey)
2730{{
2731 const struct {self.packet_name} *key = (const struct {self.packet_name} *) vkey;
2732 genhash_val_t result = 0;
2733
2734"""
2735
2736 body = f"""\
2737
2738 result *= 5;
2739
2740""".join(prefix(" ", field.get_hash()) for field in self.key_fields)
2741
2742 extro = f"""\
2743
2744 result &= 0xFFFFFFFF;
2745 return result;
2746}}
2747
2748"""
2749
2750 return intro + body + extro
2751
2752 def get_cmp(self) -> str:
2753 """Generate the key comparison function for this variant"""
2754 if not self.key_fields:
2755 return f"""\
2756#define cmp_{self.name} cmp_const
2757"""
2758
2759 # note: the names `old` and `real_packet` allow reusing
2760 # field-specific cmp code
2761 intro = f"""\
2762static bool cmp_{self.name}(const void *vkey1, const void *vkey2)
2763{{
2764 const struct {self.packet_name} *old = (const struct {self.packet_name} *) vkey1;
2765 const struct {self.packet_name} *real_packet = (const struct {self.packet_name} *) vkey2;
2766 bool differ;
2767
2768"""
2769
2770 body = f"""\
2771
2772 if (differ) {{
2773 return !differ;
2774 }}
2775
2776""".join(prefix(" ", field.get_cmp()) for field in self.key_fields)
2777
2778 extro = f"""\
2779
2780 return !differ;
2781}}
2782"""
2783
2784 return intro + body + extro
2785
2786 def get_send(self) -> str:
2787 """Generate the send function for this packet variant"""
2788 if self.no_packet:
2789 # empty packet, don't need anything
2790 main_header = after_header = before_return = ""
2791 elif not self.packet.want_pre_send:
2792 # no pre-send, don't need to copy the packet
2793 main_header = f"""\
2794 const struct {self.packet_name} *real_packet = packet;
2795 int e;
2796"""
2797 after_header = before_return = ""
2798 elif not self.complex:
2799 # bit-copy the packet, no cleanup needed
2800 main_header = f"""\
2801 /* copy packet for pre-send */
2802 struct {self.packet_name} packet_buf = *packet;
2803 const struct {self.packet_name} *real_packet = &packet_buf;
2804 int e;
2805"""
2806 after_header = before_return = ""
2807 else:
2808 # deep-copy the packet for pre-send, have to destroy the copy
2809 # Note: SEND_PACKET_START has both declarations and statements,
2810 # so we have to break the pre-send copying up across either side
2811 copy = prefix(" ", self.get_copy("(&packet_buf)", "packet"))
2812 main_header = f"""\
2813 /* buffer to hold packet copy for pre-send */
2814 struct {self.packet_name} packet_buf;
2815 const struct {self.packet_name} *real_packet = &packet_buf;
2816 int e;
2817"""
2818 after_header = f"""\
2819 init_{self.packet_name}(&packet_buf);
2820{copy}\
2821"""
2822 before_return = f"""\
2823 free_{self.packet_name}(&packet_buf);
2824"""
2825
2826 report = f"""\
2827
2828 stats_total_sent++;
2829 stats_{self.name}_sent++;
2830""" if self.gen_stats else ""
2831
2832 log_key = f"""\
2833
2834 {self.log_macro}("{self.name}: sending info about ({self.keys_format})"{self.keys_arg});
2835""" if self.gen_log else ""
2836
2837 if not self.packet.want_pre_send:
2838 pre_send = ""
2839 elif self.no_packet:
2840 pre_send = f"""\
2841
2842 pre_send_{self.packet_name}(pc, nullptr);
2843"""
2844 else:
2845 pre_send = f"""\
2846
2847 pre_send_{self.packet_name}(pc, &packet_buf);
2848"""
2849
2850 delta_header = "\n" + prefix(" ", self.get_delta_send_header(before_return)) if self.delta else ""
2851
2852 init_field_addr = f"""\
2853
2854#ifdef FREECIV_JSON_CONNECTION
2855 struct plocation field_addr;
2856 {{
2857 struct plocation *field_addr_tmp = plocation_field_new(nullptr);
2858 field_addr = *field_addr_tmp;
2859 FC_FREE(field_addr_tmp);
2860 }}
2861#endif /* FREECIV_JSON_CONNECTION */
2862
2863""" if self.fields else ""
2864
2865 put_key = "".join(
2866 prefix(" ", field.get_put(False)) + "\n"
2867 for field in self.key_fields
2868 )
2869
2870 nondelta_body = "\n".join(
2871 prefix(" ", field.get_put(False))
2872 for field in self.other_fields
2873 )
2874
2875 if self.delta:
2876 delta_body = prefix(" ", self.get_delta_send_body())
2877 body = f"""\
2878#ifdef FREECIV_DELTA_PROTOCOL
2879{delta_body}\
2880
2881#else /* FREECIV_DELTA_PROTOCOL */
2882{nondelta_body}\
2883#endif /* FREECIV_DELTA_PROTOCOL */
2884"""
2885 else:
2886 body = nondelta_body
2887
2888 if not self.packet.want_post_send:
2889 post_send = ""
2890 elif self.no_packet:
2891 post_send = f"""\
2892 post_send_{self.packet_name}(pc, nullptr);
2893"""
2894 else:
2895 post_send = f"""\
2896 post_send_{self.packet_name}(pc, real_packet);
2897"""
2898
2899 return f"""\
2900{self.send_prototype}
2901{{
2902{main_header}\
2903 SEND_PACKET_START({self.type});
2904{after_header}\
2905{log_key}\
2906{report}\
2907{pre_send}\
2908{delta_header}\
2909{init_field_addr}\
2910{put_key}\
2911{body}\
2912
2913{post_send}\
2914{before_return}\
2915 SEND_PACKET_END({self.type});
2916}}
2917
2918"""
2919
2920 def get_delta_send_header(self, before_return: str = "") -> str:
2921 """Helper for get_send(). Generate the part of the send function
2922 that determined which fields differ between the real packet and
2923 the last cached packet, and possibly discards is-info packets."""
2924 declare_differ = f"""\
2925bool differ;
2926""" if self.differ_used else ""
2927
2928 if self.is_info == "no":
2929 declare_different = ""
2930 elif self.want_force:
2931 declare_different = f"""\
2932int different = force_to_send;
2933"""
2934 else:
2935 declare_different = f"""\
2936int different = 0;
2937"""
2938
2939 force_info = """\
2940 different = 1; /* Force to send. */
2941""" if self.is_info != "no" else ""
2942
2943 cmp_part = "\n".join(
2944 field.get_cmp_wrapper(i, self)
2945 for i, field in enumerate(self.other_fields)
2946 )
2947
2948 if self.is_info != "no":
2949 log_discard = f"""\
2950 {self.log_macro}(" no change -> discard");
2951""" if self.gen_log else ""
2952
2953 stats_discard = f"""\
2954 stats_{self.name}_discarded++;
2955""" if self.gen_stats else ""
2956
2957 discard_part = f"""\
2958
2959if (different == 0) {{
2960{log_discard}\
2961{stats_discard}\
2962{before_return}\
2963 SEND_PACKET_DISCARD();
2964}}
2965"""
2966 else:
2967 discard_part = ""
2968
2969 return f"""\
2970#ifdef FREECIV_DELTA_PROTOCOL
2971{self.name}_fields fields;
2972struct {self.packet_name} *old;
2973{declare_differ}\
2974{declare_different}\
2975struct genhash **hash = pc->phs.sent + {self.type};
2976
2977if (nullptr == *hash) {{
2978 *hash = genhash_new_full(hash_{self.name}, cmp_{self.name},
2979 nullptr, nullptr, nullptr, destroy_{self.packet_name});
2980}}
2981BV_CLR_ALL(fields);
2982
2983if (!genhash_lookup(*hash, real_packet, (void **) &old)) {{
2984 old = fc_malloc(sizeof(*old));
2985 /* temporary bitcopy just to insert correctly */
2986 *old = *real_packet;
2987 genhash_insert(*hash, old, old);
2988 init_{self.packet_name}(old);
2989{force_info}\
2990}}
2991
2992{cmp_part}\
2993{discard_part}\
2994#endif /* FREECIV_DELTA_PROTOCOL */
2995"""
2996
2997 def get_delta_send_body(self) -> str:
2998 """Helper for get_send(). Generate the part of the send function
2999 that transmits the delta between the real packet and the last
3000 cached packet.
3001
3002 See also get_delta_send_header()"""
3003 body = "\n".join(
3004 field.get_put_wrapper(self, i, True)
3005 for i, field in enumerate(self.other_fields)
3006 )
3007
3008 copy_to_old = self.get_copy("old", "real_packet")
3009
3010 # Reset some packets' delta state
3011 reset_part = "".join(
3012 f"""\
3013
3014hash = pc->phs.sent + {reset_packet};
3015if (nullptr != *hash) {{
3016 genhash_remove(*hash, real_packet);
3017}}
3018"""
3019 for reset_packet in self.reset_packets
3020 )
3021
3022 return f"""\
3023#ifdef FREECIV_JSON_CONNECTION
3024field_addr.name = "fields";
3025#endif /* FREECIV_JSON_CONNECTION */
3026e = 0;
3027e |= DIO_BV_PUT(&dout, &field_addr, fields);
3028if (e) {{
3029 log_packet_detailed("fields bitvector error detected");
3030}}
3031
3032{body}\
3033
3034{copy_to_old}\
3035{reset_part}\
3036"""
3037
3038 def get_receive(self) -> str:
3039 """Generate the receive function for this packet variant"""
3040 init_field_addr = f"""\
3041
3042#ifdef FREECIV_JSON_CONNECTION
3043 struct plocation field_addr;
3044 {{
3045 struct plocation *field_addr_tmp = plocation_field_new(nullptr);
3046 field_addr = *field_addr_tmp;
3047 FC_FREE(field_addr_tmp);
3048 }}
3049#endif /* FREECIV_JSON_CONNECTION */
3050""" if self.fields else ""
3051
3052 get_key = "".join(
3053 prefix(" ", field.get_get(False)) + "\n"
3054 for field in self.key_fields
3055 )
3056
3057 log_key = f"""\
3058 {self.log_macro}("{self.name}: got info about ({self.keys_format})"{self.keys_arg});
3059
3060""" if self.gen_log else ""
3061
3062 nondelta_body = "\n".join(
3063 prefix(" ", field.get_get(False))
3064 for field in self.other_fields
3065 ) or f"""\
3066 real_packet->__dummy = 0xff;
3067"""
3068
3069 if self.delta:
3070 delta_body = prefix(" ", self.get_delta_receive_body())
3071 get_body = f"""\
3072#ifdef FREECIV_DELTA_PROTOCOL
3073{delta_body}\
3074
3075#else /* FREECIV_DELTA_PROTOCOL */
3076{nondelta_body}\
3077#endif /* FREECIV_DELTA_PROTOCOL */
3078"""
3079 else:
3080 get_body = nondelta_body
3081
3082 post_receive = f"""\
3083 post_receive_{self.packet_name}(pc, real_packet);
3084""" if self.packet.want_post_recv else ""
3085
3086 return f"""\
3087{self.receive_prototype}
3088{{
3089#define FREE_PACKET_STRUCT(_packet) free_{self.packet_name}(_packet)
3090 RECEIVE_PACKET_START({self.packet_name}, real_packet);
3091{init_field_addr}\
3092
3093{get_key}\
3094{log_key}\
3095{get_body}\
3096
3097{post_receive}\
3098 RECEIVE_PACKET_END(real_packet);
3099#undef FREE_PACKET_STRUCT
3100}}
3101
3102"""
3103
3104 def get_delta_receive_body(self) -> str:
3105 """Helper for get_receive(). Generate the part of the receive
3106 function responsible for recreating the full packet from the
3107 received delta and the last cached packet.
3108
3109 Note: This code fragment declares variables. To comply with
3110 CodingStyle, it should be enclosed in a block {} or #ifdef."""
3111 log_no_old = f"""\
3112 {self.log_macro}(" no old info");
3113""" if self.gen_log else ""
3114
3115 copy_from_old = prefix(" ", self.get_copy("real_packet", "old"))
3116 copy_to_old = prefix(" ", self.get_copy("old", "real_packet"))
3117
3118 body = "\n".join(
3119 field.get_get_wrapper(self, i, True)
3120 for i, field in enumerate(self.other_fields)
3121 )
3122
3123 # Reset some packets' delta state
3124 reset_part = "".join(
3125 f"""\
3126
3127hash = pc->phs.received + {reset_packet};
3128if (nullptr != *hash) {{
3129 genhash_remove(*hash, real_packet);
3130}}
3131"""
3132 for reset_packet in self.reset_packets
3133 )
3134
3135 return f"""\
3136{self.name}_fields fields;
3137struct {self.packet_name} *old;
3138struct genhash **hash = pc->phs.received + {self.type};
3139
3140if (nullptr == *hash) {{
3141 *hash = genhash_new_full(hash_{self.name}, cmp_{self.name},
3142 nullptr, nullptr, nullptr, destroy_{self.packet_name});
3143}}
3144
3145if (genhash_lookup(*hash, real_packet, (void **) &old)) {{
3146{copy_from_old}\
3147}} else {{
3148 /* packet is already initialized empty */
3149{log_no_old}\
3150}}
3151
3152#ifdef FREECIV_JSON_CONNECTION
3153field_addr.name = "fields";
3154#endif /* FREECIV_JSON_CONNECTION */
3155DIO_BV_GET(&din, &field_addr, fields);
3156
3157{body}\
3158
3159if (nullptr == old) {{
3160 old = fc_malloc(sizeof(*old));
3161 init_{self.packet_name}(old);
3162{copy_to_old}\
3163 genhash_insert(*hash, old, old);
3164}} else {{
3165{copy_to_old}\
3166}}
3167{reset_part}\
3168"""
3169
3170
3171class Directions(Enum):
3172 """Describes the possible combinations of directions for which a packet
3173 can be valid"""
3174
3175 # Note: "sc" and "cs" are used to match the packet flags
3176
3177 DOWN_ONLY = frozenset({"sc"})
3178 """Packet may only be sent from server to client"""
3179
3180 UP_ONLY = frozenset({"cs"})
3181 """Packet may only be sent from client to server"""
3182
3183 UNRESTRICTED = frozenset({"sc", "cs"})
3184 """Packet may be sent both ways"""
3185
3186 @property
3187 def down(self) -> bool:
3188 """Whether a packet may be sent from server to client"""
3189 return "sc" in self.value
3190
3191 @property
3192 def up(self) -> bool:
3193 """Whether a packet may be sent from client to server"""
3194 return "cs" in self.value
3195
3196
3197class Packet:
3198 """Represents a single packet type (possibly with multiple variants)"""
3199
3200 RESET_PATTERN = re.compile(r"^reset\‍((.*)\‍)$")
3201 """Matches a reset flag
3202
3203 Groups:
3204 - the packet type to reset"""
3205
3206 cfg: ScriptConfig
3207 """Configuration used when generating code for this packet"""
3208
3209 type: str
3210 """The packet type in allcaps (PACKET_FOO), as defined in the
3211 packet_type enum
3212
3213 See also self.name"""
3214
3215 type_number: int
3216 """The numeric ID of this packet type"""
3217
3218 reset_packets: "list[str]"
3219 """List of packet types to drop from the cache when sending or
3220 receiving this packet type"""
3221
3222 is_info: 'typing.Literal["no", "yes", "game"]' = "no"
3223 """Whether this is an is-info or is-game-info packet.
3224 "no" means normal, "yes" means is-info, "game" means is-game-info"""
3225
3226 want_dsend: bool = False
3227 """Whether to generate a direct-send function taking field values
3228 instead of a packet struct"""
3229
3230 want_lsend: bool = False
3231 """Whether to generate a list-send function sending a packet to
3232 multiple connections"""
3233
3234 want_force: bool = False
3235 """Whether send functions should take a force_to_send parameter
3236 to override discarding is-info packets where nothing changed"""
3237
3238 want_pre_send: bool = False
3239 """Whether a pre-send hook should be called when sending this packet"""
3240
3241 want_post_send: bool = False
3242 """Whether a post-send hook should be called after sending this packet"""
3243
3244 want_post_recv: bool = False
3245 """Whether a post-receive hook should be called when receiving this
3246 packet"""
3247
3248 delta: bool = True
3249 """Whether to use delta optimization for this packet"""
3250
3251 no_handle: bool = False
3252 """Whether this packet should *not* be handled normally"""
3253
3254 handle_via_packet: bool = True
3255 """Whether to pass the entire packet (by reference) to the handle
3256 function (rather than each field individually)"""
3257
3258 handle_per_conn: bool = False
3259 """Whether this packet's handle function should be called with the
3260 connection instead of the attached player"""
3261
3262 dirs: Directions
3263 """Which directions this packet can be sent in"""
3264
3265 all_fields: "list[Field]"
3266 """List of all fields of this packet, including name duplicates for
3267 different capability variants that are compatible.
3268
3269 Only relevant for creating Variants; self.fields should be used when
3270 not dealing with capabilities or Variants."""
3271
3272 fields: "list[Field]"
3273 """List of all fields of this packet, with only one field of each name"""
3274
3275 variants: "list[Variant]"
3276 """List of all variants of this packet"""
3277
3278 def __init__(self, cfg: ScriptConfig, packet_type: str, packet_number: int, flags_text: str,
3279 lines: typing.Iterable[str], resolve_type: typing.Callable[[str], RawFieldType]):
3280 self.cfg = cfg
3281 self.type = packet_type
3282 self.type_number = packet_number
3283
3284 self.reset_packets = []
3285 dirs: 'set[typing.Literal["sc", "cs"]]' = set()
3286
3287 for flag in flags_text.split(","):
3288 flag = flag.strip()
3289 if not flag:
3290 continue
3291
3292 if flag in ("sc", "cs"):
3293 dirs.add(flag)
3294 continue
3295 if flag == "is-info":
3296 self.is_info = "yes"
3297 continue
3298 if flag == "is-game-info":
3299 self.is_info = "game"
3300 continue
3301 if flag == "dsend":
3302 self.want_dsend = True
3303 continue
3304 if flag == "lsend":
3305 self.want_lsend = True
3306 continue
3307 if flag == "force":
3308 self.want_force = True
3309 continue
3310 if flag == "pre-send":
3311 self.want_pre_send = True
3312 continue
3313 if flag == "post-send":
3314 self.want_post_send = True
3315 continue
3316 if flag == "post-recv":
3317 self.want_post_recv = True
3318 continue
3319 if flag == "no-delta":
3320 self.delta = False
3321 continue
3322 if flag == "no-handle":
3323 self.no_handle = True
3324 continue
3325 if flag == "handle-via-fields":
3326 self.handle_via_packet = False
3327 continue
3328 if flag == "handle-per-conn":
3329 self.handle_per_conn = True
3330 continue
3331
3332 mo = __class__.RESET_PATTERN.fullmatch(flag)
3333 if mo is not None:
3334 self.reset_packets.append(mo.group(1))
3335 continue
3336
3337 raise ValueError(f"unrecognized flag for {self.type}: {flag!r}")
3338
3339 if not dirs:
3340 raise ValueError(f"no directions defined for {self.type}")
3341 self.dirs = Directions(frozenset(dirs))
3342
3343 raw_fields = [
3344 field
3345 for line in lines
3346 for field in Field.parse(self.cfg, line, resolve_type)
3347 ]
3348 # put key fields before all others
3349 key_fields = [field for field in raw_fields if field.is_key]
3350 other_fields = [field for field in raw_fields if not field.is_key]
3351 self.all_fields = key_fields + other_fields
3352
3353 self.fields = []
3354 # check for duplicate field names
3355 for next_field in self.all_fields:
3356 duplicates = [field for field in self.fields if field.name == next_field.name]
3357 if not duplicates:
3358 self.fields.append(next_field)
3359 continue
3360 if not all(field.is_compatible(next_field) for field in duplicates):
3361 raise ValueError(f"incompatible fields with duplicate name: {packet_type}({packet_number}).{next_field.name}")
3362
3363 # valid, since self.fields is already set
3364 if self.no_packet:
3365 self.delta = False
3366 self.handle_via_packet = False
3367
3368 if self.want_dsend:
3369 raise ValueError(f"requested dsend for {self.type} without fields isn't useful")
3370
3371 # create cap variants
3372 all_caps = self.all_caps # valid, since self.all_fields is already set
3373 self.variants = [
3374 Variant(caps, all_caps.difference(caps), self, i + 100)
3375 for i, caps in enumerate(powerset(sorted(all_caps)))
3376 ]
3377
3378 @property
3379 def name(self) -> str:
3380 """Snake-case name of this packet type"""
3381 return self.type.lower()
3382
3383 @property
3384 def no_packet(self) -> bool:
3385 """Whether this packet's send functions should take no packet
3386 argument. This is the case iff this packet has no fields."""
3387 return not self.fields
3388
3389 @property
3390 def send_params(self) -> str:
3391 """Parameters for the send and lsend functions, not including the
3392 connection or list of connections to send to"""
3393 return (
3394 f", const struct {self.name} *packet" if not self.no_packet else ""
3395 ) + (
3396 ", bool force_to_send" if self.want_force else ""
3397 )
3398
3399 @property
3400 def send_args(self) -> str:
3401 """Arguments passed from lsend to send, not including the
3402 connection to send to
3403
3404 See also extra_send_args"""
3405 assert self.want_lsend
3406 return (
3407 ", packet" if not self.no_packet else ""
3408 ) + (
3409 ", force_to_send" if self.want_force else ""
3410 )
3411
3412 @property
3413 def dsend_params(self) -> str:
3414 """Parameters for the dsend and dlsend functions, not including the
3415 connection or list of connections to send to"""
3416 assert self.want_dsend
3417 return "".join(
3418 f", {field.get_param()}"
3419 for field in self.fields
3420 ) + (", bool force_to_send" if self.want_force else "")
3421
3422 @property
3423 def send_prototype(self) -> str:
3424 """Prototype for the regular send function"""
3425 return f"int send_{self.name}(struct connection *pc{self.send_params})"
3426
3427 @property
3428 def lsend_prototype(self) -> str:
3429 """Prototype for the lsend function (takes a list of connections)"""
3430 assert self.want_lsend
3431 return f"void lsend_{self.name}(struct conn_list *dest{self.send_params})"
3432
3433 @property
3434 def dsend_prototype(self) -> str:
3435 """Prototype for the dsend function (directly takes values instead of a packet struct)"""
3436 assert self.want_dsend
3437 return f"int dsend_{self.name}(struct connection *pc{self.dsend_params})"
3438
3439 @property
3440 def dlsend_prototype(self) -> str:
3441 """Prototype for the dlsend function (directly takes values; list of connections)"""
3442 assert self.want_dsend
3443 assert self.want_lsend
3444 return f"void dlsend_{self.name}(struct conn_list *dest{self.dsend_params})"
3445
3446 @property
3447 def all_caps(self) -> "set[str]":
3448 """Set of all capabilities affecting this packet"""
3449 return {cap for field in self.all_fields for cap in field.all_caps}
3450
3451 @property
3452 def complex(self) -> bool:
3453 """Whether this packet's struct requires special handling for
3454 initialization, copying, and destruction."""
3455 return any(field.complex for field in self.fields)
3456
3457 def get_struct(self) -> str:
3458 """Generate the struct definition for this packet"""
3459 intro = f"""\
3460struct {self.name} {{
3461"""
3462 extro = f"""\
3463}};
3464
3465"""
3466
3467 body = "".join(
3468 prefix(" ", field.get_declaration())
3469 for field in self.fields
3470 ) or f"""\
3471 char __dummy; /* to avoid malloc(0); */
3472"""
3473 return intro+body+extro
3474
3475 def get_prototypes(self) -> str:
3476 """Generate the header prototype declarations for the public
3477 functions associated with this packet."""
3478 result = f"""\
3479{self.send_prototype};
3480"""
3481 if self.want_lsend:
3482 result += f"""\
3483{self.lsend_prototype};
3484"""
3485 if self.want_dsend:
3486 result += f"""\
3487{self.dsend_prototype};
3488"""
3489 if self.want_lsend:
3490 result += f"""\
3491{self.dlsend_prototype};
3492"""
3493 return result + "\n"
3494
3495 def get_stats(self) -> str:
3496 """Generate the code declaring counters for this packet's variants.
3497
3498 See Variant.get_stats()"""
3499 return "".join(v.get_stats() for v in self.variants)
3500
3501 def get_report_part(self) -> str:
3502 """Generate this packet's part of the delta_stats_report() function.
3503
3504 See Variant.get_report_part() and
3505 PacketsDefinition.code_delta_stats_report"""
3506 return "".join(v.get_report_part() for v in self.variants)
3507
3508 def get_reset_part(self) -> str:
3509 """Generate this packet's part of the delta_stats_reset() function.
3510
3511 See Variant.get_reset_part() and
3512 PacketsDefinition.code_delta_stats_reset"""
3513 return "\n".join(v.get_reset_part() for v in self.variants)
3514
3515 def get_init(self) -> str:
3516 """Generate this packet's init function, which initializes the
3517 packet struct so its complex-typed fields are usable, and sets
3518 all fields to the empty default state used for computing deltas"""
3519 if self.complex:
3520 field_parts = "\n" + "".join(
3521 prefix(" ", field.get_init())
3522 for field in self.fields
3523 )
3524 assert len(field_parts) > 1, f"complex packet with no field initializers: {self.type}({self.type_number})"
3525 else:
3526 field_parts = ""
3527 return f"""\
3528static inline void init_{self.name}(struct {self.name} *packet)
3529{{
3530 memset(packet, 0, sizeof(*packet));
3531{field_parts}\
3532}}
3533
3534"""
3535
3536 def get_free_destroy(self) -> str:
3537 """Generate this packet's free and destroy functions, which free
3538 memory associated with complex-typed fields of this packet, and
3539 optionally the allocation of the packet itself (destroy)."""
3540 if not self.complex:
3541 return f"""\
3542#define free_{self.name}(_packet) (void) 0
3543#define destroy_{self.name} free
3544
3545"""
3546
3547 # drop fields in reverse order, in case later fields depend on
3548 # earlier fields (e.g. for actual array sizes)
3549 field_parts = "".join(
3550 prefix(" ", field.get_free())
3551 for field in reversed(self.fields)
3552 )
3553 assert field_parts, f"complex packet with no field destructors: {self.type}({self.type_number})"
3554 # NB: destroy_*() takes void* to avoid casts
3555 return f"""\
3556static inline void free_{self.name}(struct {self.name} *packet)
3557{{
3558{field_parts}\
3559}}
3560
3561static inline void destroy_{self.name}(void *packet)
3562{{
3563 free_{self.name}((struct {self.name} *) packet);
3564 free(packet);
3565}}
3566
3567"""
3568
3569 def get_send(self) -> str:
3570 """Generate the implementation of the send function, which sends a
3571 given packet to a given connection."""
3572 if self.no_packet:
3573 func = "no_packet"
3574 args = ""
3575 elif self.want_force:
3576 func = "force_to_send"
3577 args = ", packet, force_to_send"
3578 else:
3579 func = "packet"
3580 args = ", packet"
3581
3582 return f"""\
3583{self.send_prototype}
3584{{
3585 if (!pc->used) {{
3586 log_error("WARNING: trying to send data to the closed connection %s",
3587 conn_description(pc));
3588 return -1;
3589 }}
3590 fc_assert_ret_val_msg(pc->phs.handlers->send[{self.type}].{func} != nullptr, -1,
3591 "Handler for {self.type} not installed");
3592 return pc->phs.handlers->send[{self.type}].{func}(pc{args});
3593}}
3594
3595"""
3596
3597 def get_variants(self) -> str:
3598 """Generate all code associated with individual variants of this
3599 packet; see the Variant class (and its methods) for details."""
3600 result = ""
3601 for v in self.variants:
3602 if v.delta:
3603 result += f"""\
3604#ifdef FREECIV_DELTA_PROTOCOL
3605{v.get_hash()}\
3606{v.get_cmp()}\
3607{v.get_bitvector()}\
3608#endif /* FREECIV_DELTA_PROTOCOL */
3609
3610"""
3611 result += v.get_receive()
3612 result += v.get_send()
3613 return result
3614
3615 def get_lsend(self) -> str:
3616 """Generate the implementation of the lsend function, which takes
3617 a list of connections to send a packet to."""
3618 if not self.want_lsend: return ""
3619 return f"""\
3620{self.lsend_prototype}
3621{{
3622 conn_list_iterate(dest, pconn) {{
3623 send_{self.name}(pconn{self.send_args});
3624 }} conn_list_iterate_end;
3625}}
3626
3627"""
3628
3629 def get_dsend(self) -> str:
3630 """Generate the implementation of the dsend function, which directly
3631 takes packet fields instead of a packet struct."""
3632 if not self.want_dsend: return ""
3633 # safety: fill just borrows the given values; no init/free necessary
3634 fill = "".join(
3635 prefix(" ", field.get_fill())
3636 for field in self.fields
3637 )
3638 return f"""\
3639{self.dsend_prototype}
3640{{
3641 struct {self.name} packet, *real_packet = &packet;
3642
3643{fill}\
3644
3645 return send_{self.name}(pc, real_packet);
3646}}
3647
3648"""
3649
3650 def get_dlsend(self) -> str:
3651 """Generate the implementation of the dlsend function, combining
3652 dsend and lsend functionality.
3653
3654 See self.get_dsend() and self.get_lsend()"""
3655 if not (self.want_lsend and self.want_dsend): return ""
3656 # safety: fill just borrows the given values; no init/free necessary
3657 fill = "".join(
3658 prefix(" ", field.get_fill())
3659 for field in self.fields
3660 )
3661 return f"""\
3662{self.dlsend_prototype}
3663{{
3664 struct {self.name} packet, *real_packet = &packet;
3665
3666{fill}\
3667
3668 lsend_{self.name}(dest, real_packet);
3669}}
3670
3671"""
3672
3673
3674class PacketsDefinition(typing.Iterable[Packet]):
3675 """Represents an entire packets definition file"""
3676
3677 COMMENT_START_PATTERN = re.compile(r"""
3678 ^\s* # strip initial whitespace
3679 (.*?) # actual content; note the reluctant quantifier
3680 \s* # note: this can cause quadratic backtracking
3681 (?: # match a potential comment
3682 (?: # EOL comment (or just EOL)
3683 (?:
3684 (?:\#|//) # opening # or //
3685 .*
3686 )?
3687 ) | (?: # block comment ~> capture remaining text
3688 /\* # opening /*
3689 [^*]* # text that definitely can't end the block comment
3690 (.*) # remaining text, might contain a closing */
3691 )
3692 )
3693 (?:\n)? # optional newline in case those aren't stripped
3694 $
3695 """, re.VERBOSE)
3696 """Used to clean lines when not starting inside a block comment. Finds
3697 the start of a block comment, if it exists.
3698
3699 Groups:
3700 - Actual content before any comment starts; stripped.
3701 - Remaining text after the start of a block comment. Not present if no
3702 block comment starts on this line."""
3703
3704 COMMENT_END_PATTERN = re.compile(r"""
3705 ^
3706 .*? # comment; note the reluctant quantifier
3707 (?: # end of block comment ~> capture remaining text
3708 \*/ # closing */
3709 \s* # strip whitespace after comment
3710 (.*) # remaining text
3711 )?
3712 (?:\n)? # optional newline in case those aren't stripped
3713 $
3714 """, re.VERBOSE)
3715 """Used to clean lines when starting inside a block comment. Finds the
3716 end of a block comment, if it exists.
3717
3718 Groups:
3719 - Remaining text after the end of the block comment; lstripped. Not
3720 present if the block comment doesn't end on this line."""
3721
3722 TYPE_PATTERN = re.compile(r"^\s*type\s+(\w+)\s*=\s*(.+?)\s*$")
3723 """Matches type alias definition lines
3724
3725 Groups:
3726 - the alias to define
3727 - the meaning for the alias"""
3728
3729 PACKET_HEADER_PATTERN = re.compile(r"^\s*(PACKET_\w+)\s*=\s*(\d+)\s*;\s*(.*?)\s*$")
3730 """Matches the header line of a packet definition
3731
3732 Groups:
3733 - packet type name
3734 - packet number
3735 - packet flags text"""
3736
3737 PACKET_END_PATTERN = re.compile(r"^\s*end\s*$")
3738 """Matches the "end" line terminating a packet definition"""
3739
3740 cfg: ScriptConfig
3741 """Configuration used for code generated from this definition"""
3742
3743 type_registry: TypeRegistry
3744 """Type registry used to resolve type classes for field types"""
3745
3746 types: "dict[str, RawFieldType]"
3747 """Maps type aliases and definitions to the parsed type"""
3748
3749 packets: "list[Packet]"
3750 """List of all packets, in order of definition"""
3751
3752 packets_by_type: "dict[str, Packet]"
3753 """Maps packet types (PACKET_FOO) to the packet with that type"""
3754
3755 packets_by_number: "dict[int, Packet]"
3756 """Maps packet IDs to the packet with that ID"""
3757
3758 packets_by_dirs: "dict[Directions, list[Packet]]"
3759 """Maps packet directions to lists of packets with those
3760 directions, in order of definition"""
3761
3762 @classmethod
3763 def _clean_lines(cls, lines: typing.Iterable[str]) -> typing.Iterator[str]:
3764 """Strip comments and leading/trailing whitespace from the given
3765 lines. If a block comment starts in one line and ends in another,
3766 the remaining parts are joined together and yielded as one line."""
3767 inside_comment = False
3768 parts = []
3769
3770 for line in lines:
3771 while line:
3772 if inside_comment:
3773 # currently inside a block comment ~> look for */
3774 mo = cls.COMMENT_END_PATTERN.fullmatch(line)
3775 assert mo, repr(line)
3776 # If the group wasn't captured (None), we haven't found
3777 # a */ to end our comment ~> still inside_comment
3778 # Otherwise, group captured remaining line content
3779 line, = mo.groups(None)
3780 inside_comment = line is None
3781 else:
3782 mo = cls.COMMENT_START_PATTERN.fullmatch(line)
3783 assert mo, repr(line)
3784 # If the second group wasn't captured (None), there is
3785 # no /* to start a block comment ~> not inside_comment
3786 part, line = mo.groups(None)
3787 inside_comment = line is not None
3788 if part: parts.append(part)
3789
3790 if (not inside_comment) and parts:
3791 # when ending a line outside a block comment, yield what
3792 # we've accumulated
3793 yield " ".join(parts)
3794 parts.clear()
3795
3796 if inside_comment:
3797 raise ValueError("EOF while scanning block comment")
3798
3799 def parse_lines(self, lines: typing.Iterable[str]):
3800 """Parse the given lines as type and packet definitions."""
3801 self.parse_clean_lines(self._clean_lines(lines))
3802
3803 def parse_clean_lines(self, lines: typing.Iterable[str]):
3804 """Parse the given lines as type and packet definitions. Comments
3805 and blank lines must already be removed beforehand."""
3806 # hold on to the iterator itself
3807 lines_iter = iter(lines)
3808 for line in lines_iter:
3809 mo = self.TYPE_PATTERN.fullmatch(line)
3810 if mo is not None:
3811 self.define_type(*mo.groups())
3812 continue
3813
3814 mo = self.PACKET_HEADER_PATTERN.fullmatch(line)
3815 if mo is not None:
3816 packet_type, packet_number, flags_text = mo.groups("")
3817 packet_number = int(packet_number)
3818
3819 if packet_type in self.packets_by_type:
3820 raise ValueError("Duplicate packet type: " + packet_type)
3821
3822 if packet_number not in range(65536):
3823 raise ValueError(f"packet number {packet_number:d} for {packet_type} outside legal range [0,65536)")
3824 if packet_number in self.packets_by_number:
3825 raise ValueError(f"Duplicate packet number: {packet_number:d} ({self.packets_by_number[packet_number].type} and {packet_type})")
3826
3827 packet = Packet(
3828 self.cfg, packet_type, packet_number, flags_text,
3829 takewhile(
3830 lambda line: self.PACKET_END_PATTERN.fullmatch(line) is None,
3831 lines_iter, # advance the iterator used by this for loop
3832 ),
3833 self.resolve_type,
3834 )
3835
3836 self.packets.append(packet)
3837 self.packets_by_number[packet_number] = packet
3838 self.packets_by_type[packet_type] = packet
3839 self.packets_by_dirs[packet.dirs].append(packet)
3840 continue
3841
3842 raise ValueError("Unexpected line: " + line)
3843
3844 def resolve_type(self, type_text: str) -> RawFieldType:
3845 """Resolve the given type"""
3846 if type_text not in self.types:
3847 self.types[type_text] = self.type_registry.parse(type_text)
3848 return self.types[type_text]
3849
3850 def define_type(self, alias: str, meaning: str):
3851 """Define a type alias"""
3852 if alias in self.types:
3853 raise ValueError(f"duplicate type alias {alias!r}: {self.types[alias]} and {meaning}")
3854 self.types[alias] = self.resolve_type(meaning)
3855
3856 def __init__(self, cfg: ScriptConfig, type_registry: "TypeRegistry | None" = None):
3857 self.cfg = cfg
3858 self.type_registry = type_registry or DEFAULT_REGISTRY
3859 self.types = {}
3860 self.packets = []
3861 self.packets_by_type = {}
3862 self.packets_by_number = {}
3863 self.packets_by_dirs = {
3864 dirs: []
3865 for dirs in Directions
3866 }
3867
3868 def __iter__(self) -> typing.Iterator[Packet]:
3869 return iter(self.packets)
3870
3871 def iter_by_number(self) -> "typing.Generator[tuple[int, Packet, int], None, int]":
3872 """Yield (number, packet, skipped) tuples in order of packet number.
3873
3874 skipped is how many numbers were skipped since the last packet
3875
3876 Return the maximum packet number (or -1 if there are no packets)
3877 when used with `yield from`."""
3878 last = -1
3879 for n, packet in sorted(self.packets_by_number.items()):
3880 assert n == packet.type_number
3881 yield (n, packet, n - last - 1)
3882 last = n
3883 return last
3884
3885 @property
3886 def all_caps(self) -> "set[str]":
3887 """Set of all capabilities affecting the defined packets"""
3888 return { cap for p in self for cap in p.all_caps }
3889
3890 @property
3891 def code_packet_functional_capability(self) -> str:
3892 """Code fragment defining the packet_functional_capability string"""
3893 return f"""\
3894const char *const packet_functional_capability = "{' '.join(sorted(self.all_caps))}";
3895"""
3896
3897 @property
3898 def code_delta_stats_report(self) -> str:
3899 """Code fragment implementing the delta_stats_report() function"""
3900 if not self.cfg.gen_stats: return f"""\
3901void delta_stats_report(void) {{}}
3902"""
3903
3904 intro = f"""\
3905void delta_stats_report(void) {{
3906 int i;
3907"""
3908 extro = f"""\
3909}}
3910
3911"""
3912 body = "".join(
3913 prefix(" ", packet.get_report_part())
3914 for packet in self
3915 )
3916 return intro + body + extro
3917
3918 @property
3919 def code_delta_stats_reset(self) -> str:
3920 """Code fragment implementing the delta_stats_reset() function"""
3921 if not self.cfg.gen_stats: return f"""\
3922void delta_stats_reset(void) {{}}
3923
3924"""
3925
3926 intro = f"""\
3927void delta_stats_reset(void) {{
3928"""
3929 extro = f"""\
3930}}
3931
3932"""
3933 body = "\n".join(
3934 prefix(" ", packet.get_reset_part())
3935 for packet in self
3936 )
3937 return intro + body + extro
3938
3939 @property
3940 def code_packet_name(self) -> str:
3941 """Code fragment implementing the packet_name() function"""
3942 intro = f"""\
3943const char *packet_name(enum packet_type type)
3944{{
3945 static const char *const names[PACKET_LAST] = {{
3946"""
3947
3948 body = ""
3949 for _, packet, skipped in self.iter_by_number():
3950 body += f"""\
3951 "unknown",
3952""" * skipped
3953 body += f"""\
3954 "{packet.type}",
3955"""
3956
3957 extro = f"""\
3958 }};
3959
3960 return (type < PACKET_LAST ? names[type] : "unknown");
3961}}
3962
3963"""
3964 return intro + body + extro
3965
3966 @property
3967 def code_packet_has_game_info_flag(self) -> str:
3968 """Code fragment implementing the packet_has_game_info_flag()
3969 function"""
3970 intro = f"""\
3971bool packet_has_game_info_flag(enum packet_type type)
3972{{
3973 static const bool flag[PACKET_LAST] = {{
3974"""
3975 body = "".join(
3976 f"""\
3977 [{packet.type}] = TRUE,
3978"""
3979 for packet in self
3980 if packet.is_info == "game"
3981 )
3982
3983 extro = f"""\
3984 /* others are FALSE by default */
3985 }};
3986
3987 return (type < PACKET_LAST) && flag[type];
3988}}
3989
3990"""
3991 return intro + body + extro
3992
3993 @property
3994 def code_packet_handlers_fill_initial(self) -> str:
3995 """Code fragment implementing the packet_handlers_fill_initial()
3996 function"""
3997 intro = f"""\
3998void packet_handlers_fill_initial(struct packet_handlers *phandlers)
3999{{
4000"""
4001 for cap in sorted(self.all_caps):
4002 intro += f"""\
4003 fc_assert_msg(has_capability("{cap}", our_capability),
4004 "Packets have support for unknown '{cap}' capability!");
4005"""
4006
4007 down_only = [
4008 packet.variants[0]
4009 for packet in self.packets_by_dirs[Directions.DOWN_ONLY]
4010 if len(packet.variants) == 1
4011 ]
4012 up_only = [
4013 packet.variants[0]
4014 for packet in self.packets_by_dirs[Directions.UP_ONLY]
4015 if len(packet.variants) == 1
4016 ]
4017 unrestricted = [
4018 packet.variants[0]
4019 for packet in self.packets_by_dirs[Directions.UNRESTRICTED]
4020 if len(packet.variants) == 1
4021 ]
4022
4023 body = ""
4024 for variant in unrestricted:
4025 body += prefix(" ", variant.fill_send_handler)
4026 body += prefix(" ", variant.fill_receive_handler)
4027 body += f"""\
4028 if (is_server()) {{
4029"""
4030 for variant in down_only:
4031 body += prefix(" ", variant.fill_send_handler)
4032 for variant in up_only:
4033 body += prefix(" ", variant.fill_receive_handler)
4034 body += f"""\
4035 }} else /* not is_server() */ {{
4036"""
4037 for variant in up_only:
4038 body += prefix(" ", variant.fill_send_handler)
4039 for variant in down_only:
4040 body += prefix(" ", variant.fill_receive_handler)
4041
4042 extro = f"""\
4043 }}
4044}}
4045
4046"""
4047 return intro + body + extro
4048
4049 @property
4050 def code_packet_handlers_fill_capability(self) -> str:
4051 """Code fragment implementing the packet_handlers_fill_capability()
4052 function"""
4053 intro = f"""\
4054void packet_handlers_fill_capability(struct packet_handlers *phandlers,
4055 const char *capability)
4056{{
4057"""
4058
4059 down_only = [
4060 packet
4061 for packet in self.packets_by_dirs[Directions.DOWN_ONLY]
4062 if len(packet.variants) > 1
4063 ]
4064 up_only = [
4065 packet
4066 for packet in self.packets_by_dirs[Directions.UP_ONLY]
4067 if len(packet.variants) > 1
4068 ]
4069 unrestricted = [
4070 packet
4071 for packet in self.packets_by_dirs[Directions.UNRESTRICTED]
4072 if len(packet.variants) > 1
4073 ]
4074
4075 body = ""
4076 for packet in unrestricted:
4077 # indent for the start of the if-else
4078 # ends mid-line
4079 body += f"""\
4080 """
4081 for variant in packet.variants:
4082 hand = prefix(" ", variant.fill_send_handler + variant.fill_receive_handler)
4083 # starts and ends mid-line
4084 body += f"""if ({variant.condition}) {{
4085 {self.cfg.log_macro}("{packet.type}: using variant={variant.var_number} cap=%s", capability);
4086{hand}\
4087 }} else """
4088 # starts mid-line
4089 body += f"""{{
4090 log_error("Unknown {packet.type} variant for cap %s", capability);
4091 }}
4092"""
4093 if up_only or down_only:
4094 body += f"""\
4095 if (is_server()) {{
4096"""
4097 for packet in down_only:
4098 # indent for the start of the if-else
4099 # ends mid-line
4100 body += f"""\
4101 """
4102 for variant in packet.variants:
4103 hand = prefix(" ", variant.fill_send_handler)
4104 # starts and ends mid-line
4105 body += f"""if ({variant.condition}) {{
4106 {self.cfg.log_macro}("{packet.type}: using variant={variant.var_number} cap=%s", capability);
4107{hand}\
4108 }} else """
4109 # starts mid-line
4110 body += f"""{{
4111 log_error("Unknown {packet.type} variant for cap %s", capability);
4112 }}
4113"""
4114 for packet in up_only:
4115 # indent for the start of the if-else
4116 # ends mid-line
4117 body += f"""\
4118 """
4119 for variant in packet.variants:
4120 hand = prefix(" ", variant.fill_receive_handler)
4121 # starts and ends mid-line
4122 body += f"""if ({variant.condition}) {{
4123 {self.cfg.log_macro}("{packet.type}: using variant={variant.var_number} cap=%s", capability);
4124{hand}\
4125 }} else """
4126 # starts mid-line
4127 body += f"""{{
4128 log_error("Unknown {packet.type} variant for cap %s", capability);
4129 }}
4130"""
4131 body += f"""\
4132 }} else /* not is_server() */ {{
4133"""
4134 for packet in up_only:
4135 # indent for the start of the if-else
4136 # ends mid-line
4137 body += f"""\
4138 """
4139 for variant in packet.variants:
4140 hand = prefix(" ", variant.fill_send_handler)
4141 # starts and ends mid-line
4142 body += f"""if ({variant.condition}) {{
4143 {self.cfg.log_macro}("{packet.type}: using variant={variant.var_number} cap=%s", capability);
4144{hand}\
4145 }} else """
4146 # starts mid-line
4147 body += f"""{{
4148 log_error("Unknown {packet.type} variant for cap %s", capability);
4149 }}
4150"""
4151 for packet in down_only:
4152 # indent for the start of the if-else
4153 # ends mid-line
4154 body += f"""\
4155 """
4156 for variant in packet.variants:
4157 hand = prefix(" ", variant.fill_receive_handler)
4158 # starts and ends mid-line
4159 body += f"""if ({variant.condition}) {{
4160 {self.cfg.log_macro}("{packet.type}: using variant={variant.var_number} cap=%s", capability);
4161{hand}\
4162 }} else """
4163 # starts mid-line
4164 body += f"""{{
4165 log_error("Unknown {packet.type} variant for cap %s", capability);
4166 }}
4167"""
4168 body += f"""\
4169 }}
4170"""
4171
4172 extro = f"""\
4173}}
4174"""
4175 return intro + body + extro
4176
4177 @property
4178 def code_packet_destroy(self) -> str:
4179 """Code fragment implementing the packet_destroy() function"""
4180 # NB: missing packet IDs are empty-initialized, i.e. set to nullptr by default
4181 handlers = "".join(
4182 f"""\
4183 [{packet.type}] = destroy_{packet.name},
4184"""
4185 for packet in self
4186 )
4187
4188 return f"""\
4189
4190void packet_destroy(void *packet, enum packet_type type)
4191{{
4192 static void (*const destroy_handlers[PACKET_LAST])(void *packet) = {{
4193{handlers}\
4194 }};
4195 void (*handler)(void *packet) = (type < PACKET_LAST ? destroy_handlers[type] : nullptr);
4196
4197 if (handler == nullptr) {{
4198 handler = free;
4199 log_error("packet_destroy(): Invalid packet type %d", type);
4200 }}
4201
4202 handler(packet);
4203}}
4204"""
4205
4206 @property
4207 def code_enum_packet(self) -> str:
4208 """Code fragment declaring the packet_type enum"""
4209 intro = f"""\
4210enum packet_type {{
4211"""
4212 body = ""
4213 for n, packet, skipped in self.iter_by_number():
4214 if skipped:
4215 line = f" {packet.type} = {n:d},"
4216 else:
4217 line = f" {packet.type},"
4218
4219 if not (n % 10):
4220 line = f"{line:40} /* {n:d} */"
4221 body += line + "\n"
4222
4223 extro = f"""\
4224
4225 PACKET_LAST /* leave this last */
4226}};
4227
4228"""
4229 return intro + body + extro
4230
4231
4232########################### Writing output files ###########################
4233
4234def write_common_header(path: "str | Path | None", packets: PacketsDefinition):
4235 """Write contents for common/packets_gen.h to the given path"""
4236 if path is None:
4237 return
4238 with packets.cfg.open_write(path, wrap_header = "packets_gen") as output_h:
4239 output_h.write(f"""\
4240/* common */
4241#include "actions.h"
4242#include "city.h"
4243#include "conn_types.h"
4244#include "disaster.h"
4245#include "events.h"
4246#include "player.h"
4247#include "tech.h"
4248#include "unit.h"
4249
4250/* common/aicore */
4251#include "cm.h"
4252
4253""")
4254
4255 # write structs
4256 for p in packets:
4257 output_h.write(p.get_struct())
4258
4259 output_h.write(packets.code_enum_packet)
4260
4261 # write function prototypes
4262 for p in packets:
4263 output_h.write(p.get_prototypes())
4264 output_h.write(f"""\
4265void delta_stats_report(void);
4266void delta_stats_reset(void);
4267""")
4268
4269def write_common_impl(path: "str | Path | None", packets: PacketsDefinition):
4270 """Write contents for common/packets_gen.c to the given path"""
4271 if path is None:
4272 return
4273 with packets.cfg.open_write(path) as output_c:
4274 output_c.write(f"""\
4275#ifdef HAVE_CONFIG_H
4276#include <fc_config.h>
4277#endif
4278
4279#include <string.h>
4280
4281/* utility */
4282#include "bitvector.h"
4283#include "capability.h"
4284#include "genhash.h"
4285#include "log.h"
4286#include "mem.h"
4287#include "support.h"
4288
4289/* common */
4290#include "capstr.h"
4291#include "connection.h"
4292#include "dataio.h"
4293#include "game.h"
4294
4295#include "packets.h"
4296
4297""")
4298 output_c.write(packets.code_packet_functional_capability)
4299 output_c.write(f"""\
4300
4301#ifdef FREECIV_DELTA_PROTOCOL
4302static genhash_val_t hash_const(const void *vkey)
4303{{
4304 return 0;
4305}}
4306
4307static bool cmp_const(const void *vkey1, const void *vkey2)
4308{{
4309 return TRUE;
4310}}
4311#endif /* FREECIV_DELTA_PROTOCOL */
4312
4313""")
4314
4315 if packets.cfg.gen_stats:
4316 output_c.write(f"""\
4317static int stats_total_sent;
4318
4319""")
4320 for p in packets:
4321 output_c.write(p.get_stats())
4322 # report and reset functions always exist
4323 output_c.write(packets.code_delta_stats_report)
4324 output_c.write(packets.code_delta_stats_reset)
4325
4326 output_c.write(packets.code_packet_name)
4327 output_c.write(packets.code_packet_has_game_info_flag)
4328
4329 # write packet-specific code
4330 for p in packets:
4331 output_c.write(p.get_init())
4332 output_c.write(p.get_free_destroy())
4333 output_c.write(p.get_variants())
4334 output_c.write(p.get_send())
4335 output_c.write(p.get_lsend())
4336 output_c.write(p.get_dsend())
4337 output_c.write(p.get_dlsend())
4338
4339 output_c.write(packets.code_packet_handlers_fill_initial)
4340 output_c.write(packets.code_packet_handlers_fill_capability)
4341 output_c.write(packets.code_packet_destroy)
4342
4343def write_server_header(path: "str | Path | None", packets: PacketsDefinition):
4344 """Write contents for server/hand_gen.h to the given path"""
4345 if path is None:
4346 return
4347 with packets.cfg.open_write(path, wrap_header = "hand_gen", cplusplus = False) as f:
4348 f.write(f"""\
4349/* utility */
4350#include "shared.h"
4351
4352/* common */
4353#include "fc_types.h"
4354#include "packets.h"
4355
4356struct connection;
4357
4358bool server_handle_packet(enum packet_type type, const void *packet,
4359 struct player *pplayer, struct connection *pconn);
4360
4361""")
4362
4363 for p in packets:
4364 if not p.dirs.up: continue
4365 if p.no_handle: continue
4366 name_tail = p.name[len("packet_"):]
4367
4368 if p.handle_per_conn:
4369 params = "struct connection *pc"
4370 else:
4371 params = "struct player *pplayer"
4372
4373 if p.handle_via_packet:
4374 params += f", const struct {p.name} *packet"
4375 else:
4376 params += "".join(
4377 f", {field.get_param()}"
4378 for field in p.fields
4379 )
4380
4381 f.write(f"""\
4382void handle_{name_tail}({params});
4383""")
4384
4385def write_client_header(path: "str | Path | None", packets: PacketsDefinition):
4386 """Write contents for client/packhand_gen.h to the given path"""
4387 if path is None:
4388 return
4389 with packets.cfg.open_write(path, wrap_header = "packhand_gen") as f:
4390 f.write(f"""\
4391/* utility */
4392#include "shared.h"
4393
4394/* common */
4395#include "packets.h"
4396
4397bool client_handle_packet(enum packet_type type, const void *packet);
4398
4399""")
4400 for p in packets:
4401 if not p.dirs.down: continue
4402 if p.no_handle: continue
4403 name_tail = p.name[len("packet_"):]
4404
4405 if p.handle_via_packet:
4406 params = f"const struct {p.name} *packet"
4407 else:
4408 params = ", ".join(
4409 field.get_param()
4410 for field in p.fields
4411 ) or "void"
4412
4413 f.write(f"""\
4414void handle_{name_tail}({params});
4415""")
4416
4417def write_server_impl(path: "str | Path | None", packets: PacketsDefinition):
4418 """Write contents for server/hand_gen.c to the given path"""
4419 if path is None:
4420 return
4421 with packets.cfg.open_write(path) as f:
4422 f.write(f"""\
4423#ifdef HAVE_CONFIG_H
4424#include <fc_config.h>
4425#endif
4426
4427/* common */
4428#include "packets.h"
4429
4430#include "hand_gen.h"
4431
4432bool server_handle_packet(enum packet_type type, const void *packet,
4433 struct player *pplayer, struct connection *pconn)
4434{{
4435 switch (type) {{
4436""")
4437 for p in packets:
4438 if not p.dirs.up: continue
4439 if p.no_handle: continue
4440 name_tail = p.name[len("packet_"):]
4441
4442 if p.handle_per_conn:
4443 args = "pconn"
4444 else:
4445 args = "pplayer"
4446
4447 if p.handle_via_packet:
4448 args += ", packet"
4449 else:
4450 packet = f"((const struct {p.name} *)packet)"
4451 args += "".join(
4452 ",\n " + field.get_handle_arg(packet)
4453 for field in p.fields
4454 )
4455
4456 f.write(f"""\
4457 case {p.type}:
4458 handle_{name_tail}({args});
4459 return TRUE;
4460
4461""")
4462 f.write(f"""\
4463 default:
4464 return FALSE;
4465 }}
4466}}
4467""")
4468
4469def write_client_impl(path: "str | Path | None", packets: PacketsDefinition):
4470 """Write contents for client/packhand_gen.c to the given path"""
4471 if path is None:
4472 return
4473 with packets.cfg.open_write(path) as f:
4474 f.write(f"""\
4475#ifdef HAVE_CONFIG_H
4476#include <fc_config.h>
4477#endif
4478
4479/* common */
4480#include "packets.h"
4481
4482#include "packhand_gen.h"
4483
4484bool client_handle_packet(enum packet_type type, const void *packet)
4485{{
4486 switch (type) {{
4487""")
4488 for p in packets:
4489 if not p.dirs.down: continue
4490 if p.no_handle: continue
4491 name_tail = p.name[len("packet_"):]
4492
4493 if p.handle_via_packet:
4494 args = "packet"
4495 else:
4496 packet = f"((const struct {p.name} *)packet)"
4497 args = ",".join(
4498 "\n " + field.get_handle_arg(packet)
4499 for field in p.fields
4500 )
4501
4502 f.write(f"""\
4503 case {p.type}:
4504 handle_{name_tail}({args});
4505 return TRUE;
4506
4507""")
4508 f.write(f"""\
4509 default:
4510 return FALSE;
4511 }}
4512}}
4513""")
4514
4515
4516def main(raw_args: "typing.Sequence[str] | None" = None):
4517 """Main function. Read the given arguments, or the command line
4518 arguments if raw_args is not given, and run the packet code generation
4519 script accordingly."""
4520 script_args = ScriptConfig(raw_args)
4521
4522 packets = PacketsDefinition(script_args)
4523 for path in script_args.def_paths:
4524 with path.open() as input_file:
4525 packets.parse_lines(input_file)
4526
4527 write_common_header(script_args.common_header_path, packets)
4528 write_common_impl(script_args.common_impl_path, packets)
4529 write_server_header(script_args.server_header_path, packets)
4530 write_client_header(script_args.client_header_path, packets)
4531 write_server_impl(script_args.server_impl_path, packets)
4532 write_client_impl(script_args.client_impl_path, packets)
4533
4534
4535if __name__ == "__main__":
4536 main()
char * incite_cost
Definition comments.c:76
Path file_path("str | Path" s)
Parsing Command Line Arguments ######################.