diff --git a/.idea/runConfigurations/Generate CS files (adt_ compact, wrapped).run.xml b/.idea/runConfigurations/Generate CS files (adt_ compact, wrapped).run.xml
index eab93e70..6d4c4e3c 100644
--- a/.idea/runConfigurations/Generate CS files (adt_ compact, wrapped).run.xml
+++ b/.idea/runConfigurations/Generate CS files (adt_ compact, wrapped).run.xml
@@ -6,7 +6,7 @@
-
+
diff --git a/.idea/runConfigurations/Generate CS files (adt_ full, unwrapped).run.xml b/.idea/runConfigurations/Generate CS files (adt_ full, unwrapped).run.xml
index 9b56b74c..87764a29 100644
--- a/.idea/runConfigurations/Generate CS files (adt_ full, unwrapped).run.xml
+++ b/.idea/runConfigurations/Generate CS files (adt_ full, unwrapped).run.xml
@@ -6,7 +6,7 @@
-
+
diff --git a/.idea/runConfigurations/Generate__cs_adt__unwrapped_.xml b/.idea/runConfigurations/Generate__cs_adt__unwrapped_.xml
index 41654ff7..2ab5506e 100644
--- a/.idea/runConfigurations/Generate__cs_adt__unwrapped_.xml
+++ b/.idea/runConfigurations/Generate__cs_adt__unwrapped_.xml
@@ -6,7 +6,7 @@
-
+
diff --git a/.idea/runConfigurations/Generate__cs_adt__wrapped_.xml b/.idea/runConfigurations/Generate__cs_adt__wrapped_.xml
index de9bcae1..db71c2e5 100644
--- a/.idea/runConfigurations/Generate__cs_adt__wrapped_.xml
+++ b/.idea/runConfigurations/Generate__cs_adt__wrapped_.xml
@@ -6,7 +6,7 @@
-
+
diff --git a/.mdl/defs/tests.md b/.mdl/defs/tests.md
index 70171230..de620805 100644
--- a/.mdl/defs/tests.md
+++ b/.mdl/defs/tests.md
@@ -31,13 +31,15 @@ TEST_DIR="./target/test-regular"
# Create temporary test directories
mkdir -p "$TEST_DIR"
-rm -rf "$TEST_DIR/cs-stub" "$TEST_DIR/sc-stub"
+rm -rf "$TEST_DIR/cs-stub" "$TEST_DIR/sc-stub" "$TEST_DIR/py-stub"
# Copy stub projects, excluding generated and build artifacts
rsync -a --exclude='Generated*' --exclude='generated-*' --exclude='bin' --exclude='obj' --exclude='target' --exclude='project/target' \
./test/cs-stub/ "$TEST_DIR/cs-stub/"
rsync -a --exclude='Generated*' --exclude='generated-*' --exclude='bin' --exclude='obj' --exclude='target' --exclude='project/target' \
./test/sc-stub/ "$TEST_DIR/sc-stub/"
+rsync -a --exclude='Generated*' --exclude='generated-*' --exclude='bin' --exclude='obj' --exclude='target' --exclude='project/target' \
+ ./test/py-stub/ "$TEST_DIR/py-stub/"
$BABOON_BIN \
--model-dir ./baboon-compiler/src/test/resources/baboon/ \
@@ -56,7 +58,15 @@ $BABOON_BIN \
--test-output "$TEST_DIR/sc-stub/src/test/scala/generated-tests" \
--fixture-output "$TEST_DIR/sc-stub/src/main/scala/generated-fixtures" \
--sc-write-evolution-dict true \
- --sc-wrapped-adt-branch-codecs false
+ --sc-wrapped-adt-branch-codecs false \
+ :python \
+ --output "$TEST_DIR/py-stub/BaboonDefinitions/Generated" \
+ --test-output "$TEST_DIR/py-stub/BaboonTests/GeneratedTests" \
+ --fixture-output "$TEST_DIR/py-stub/BaboonTests/GeneratedFixtures" \
+ --generate-ueba-codecs-by-default=true \
+ --generate-json-codecs-by-default=true \
+ --py-write-evolution-dict true \
+ --py-wrapped-adt-branch-codecs false
ret success:bool=true
ret test_dir:string="$TEST_DIR"
@@ -89,6 +99,22 @@ popd
ret success:bool=true
```
+# action: test-python-regular
+
+Run Python tests with regular adt codecs.
+
+```bash
+TEST_DIR="${action.test-gen-regular-adt.test_dir}"
+pushd "$TEST_DIR/py-stub"
+python3 -m venv .venv
+source .venv/bin/activate
+python3 -m pip install -r requirements.txt
+python3 -m unittest discover -s BaboonTests/GeneratedTests/testpkg/pkg0
+popd
+
+ret success:bool=true
+```
+
# action: test-gen-wrapped-adt
Generate code with wrapped ADT branch codecs.
@@ -101,13 +127,15 @@ TEST_DIR="./target/test-wrapped"
# Create temporary test directories
mkdir -p "$TEST_DIR"
-rm -rf "$TEST_DIR/cs-stub" "$TEST_DIR/sc-stub"
+rm -rf "$TEST_DIR/cs-stub" "$TEST_DIR/sc-stub" "$TEST_DIR/py-stub"
# Copy stub projects, excluding generated and build artifacts
rsync -a --exclude='Generated*' --exclude='generated-*' --exclude='bin' --exclude='obj' --exclude='target' --exclude='project/target' \
./test/cs-stub/ "$TEST_DIR/cs-stub/"
rsync -a --exclude='Generated*' --exclude='generated-*' --exclude='bin' --exclude='obj' --exclude='target' --exclude='project/target' \
./test/sc-stub/ "$TEST_DIR/sc-stub/"
+rsync -a --exclude='Generated*' --exclude='generated-*' --exclude='bin' --exclude='obj' --exclude='target' --exclude='project/target' \
+ ./test/py-stub/ "$TEST_DIR/py-stub/"
$BABOON_BIN \
--model-dir ./baboon-compiler/src/test/resources/baboon/ \
@@ -126,7 +154,15 @@ $BABOON_BIN \
--test-output "$TEST_DIR/sc-stub/src/test/scala/generated-tests" \
--fixture-output "$TEST_DIR/sc-stub/src/main/scala/generated-fixtures" \
--sc-write-evolution-dict true \
- --sc-wrapped-adt-branch-codecs true
+ --sc-wrapped-adt-branch-codecs true \
+ :python \
+ --output "$TEST_DIR/py-stub/BaboonDefinitions/Generated" \
+ --test-output "$TEST_DIR/py-stub/BaboonTests/GeneratedTests" \
+ --fixture-output "$TEST_DIR/py-stub/BaboonTests/GeneratedFixtures" \
+ --generate-ueba-codecs-by-default=true \
+ --generate-json-codecs-by-default=true \
+ --py-write-evolution-dict true \
+ --py-wrapped-adt-branch-codecs true
ret success:bool=true
ret test_dir:string="$TEST_DIR"
@@ -159,6 +195,22 @@ popd
ret success:bool=true
```
+# action: test-python-wrapped
+
+Run Python tests with wrapped ADT codecs
+
+```bash
+TEST_DIR="${action.test-gen-wrapped-adt.test_dir}"
+pushd "$TEST_DIR/py-stub"
+python3 -m venv .venv
+source .venv/bin/activate
+python3 -m pip install -r requirements.txt
+python3 -m unittest discover -s BaboonTests/GeneratedTests/testpkg/pkg0
+popd
+
+ret success:bool=true
+```
+
# action: test-gen-manual
Generate code for manual test projects.
@@ -175,7 +227,26 @@ $BABOON_BIN \
:cs \
--output ./test/conv-test-cs/ConvTest/Generated \
:scala \
- --output ./test/conv-test-sc/src/main/scala/generated-main
+ --output ./test/conv-test-sc/src/main/scala/generated-main \
+ :python \
+ --output ./test/conv-test-py/Generated
+
+ret success:bool=true
+```
+
+# action: test-gen-compat-python
+
+Generate compatibility test files using Python.
+
+```bash
+dep action.test-gen-manual
+
+pushd ./test/conv-test-py
+python3 -m venv .venv
+source .venv/bin/activate
+python3 -m pip install -r requirements.txt
+python3 compat_main.py
+popd
ret success:bool=true
```
@@ -215,6 +286,7 @@ Run manual C# compatibility tests.
```bash
dep action.test-gen-compat-scala
dep action.test-gen-compat-cs
+dep action.test-gen-compat-python
pushd ./test/conv-test-cs
dotnet build
@@ -239,6 +311,20 @@ popd
ret success:bool=true
```
+# action: test-manual-python
+
+Run Python conversion test
+
+```bash
+dep action.test-gen-compat-python
+pushd ./test/conv-test-py
+source .venv/bin/activate
+python3 -m unittest discover -s .
+popd
+
+ret success:bool=true
+```
+
# action: test
Run complete test suite (orchestrator action).
@@ -246,8 +332,10 @@ Run complete test suite (orchestrator action).
```bash
dep action.test-cs-regular
dep action.test-scala-regular
+dep action.test-python-regular
dep action.test-cs-wrapped
dep action.test-scala-wrapped
+dep action.test-python-wrapped
dep action.test-manual-cs
dep action.test-manual-scala
diff --git a/.mobala/version-commit.lock b/.mobala/version-commit.lock
new file mode 100644
index 00000000..b7b7899c
--- /dev/null
+++ b/.mobala/version-commit.lock
@@ -0,0 +1 @@
+87a6a202f917745e826d6f245f78db48cb92965b
\ No newline at end of file
diff --git a/.run/Help.run.xml b/.run/Help.run.xml
new file mode 100644
index 00000000..e69de29b
diff --git a/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/Baboon.scala b/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/Baboon.scala
index 0f622051..06e2fdf1 100644
--- a/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/Baboon.scala
+++ b/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/Baboon.scala
@@ -70,6 +70,25 @@ object Baboon {
),
)
}
+ case "python" =>
+ CaseApp.parse[PyCLIOptions](roleArgs).leftMap(e => s"Can't parse python CLI: $e").map {
+ case (opts, _) =>
+ val shopts = mkGenericOpts(opts)
+
+ CompilerTarget.PyTarget(
+ id = "Python",
+ output = shopts.outOpts,
+ generic = shopts.genericOpts,
+ language = PyOptions(
+ writeEvolutionDict = opts.pyWriteEvolutionDict.getOrElse(true),
+ wrappedAdtBranchCodecs = opts.pyWrappedAdtBranchCodecs.getOrElse(false),
+ generateJsonCodecs = opts.generateJsonCodecs.getOrElse(true),
+ generateUebaCodecs = opts.generateUebaCodecs.getOrElse(true),
+ generateJsonCodecsByDefault = opts.generateJsonCodecsByDefault.getOrElse(false),
+ generateUebaCodecsByDefault = opts.generateUebaCodecsByDefault.getOrElse(false),
+ ),
+ )
+ }
case r => Left(s"Unknown role id: $r")
}
}
@@ -119,7 +138,7 @@ object Baboon {
val safeToRemove = NEList.from(opts.extAllowCleanup) match {
case Some(value) => value.toSet
- case None => Set("meta", "cs", "json", "scala")
+ case None => Set("meta", "cs", "json", "scala", "py", "pyc")
}
val outOpts = OutputOptions(
@@ -150,6 +169,8 @@ object Baboon {
new BaboonJvmCSModule[F](t)
case t: CompilerTarget.ScTarget =>
new BaboonJvmScModule[F](t)
+ case t: CompilerTarget.PyTarget =>
+ new BaboonJvmPyModule[F](t)
}
Injector
diff --git a/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/BaboonModuleJvm.scala b/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/BaboonModuleJvm.scala
index d9981ecc..e3aff751 100644
--- a/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/BaboonModuleJvm.scala
+++ b/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/BaboonModuleJvm.scala
@@ -1,7 +1,7 @@
package io.septimalmind.baboon
import distage.ModuleDef
-import io.septimalmind.baboon.CompilerTarget.{CSTarget, ScTarget}
+import io.septimalmind.baboon.CompilerTarget.{CSTarget, PyTarget, ScTarget}
import io.septimalmind.baboon.parser.{BaboonInclusionResolver, BaboonInclusionResolverImpl}
import io.septimalmind.baboon.util.BLogger
import izumi.functional.bio.unsafe.MaybeSuspend2
@@ -25,6 +25,12 @@ class BaboonJvmScModule[F[+_, +_]: Error2: TagKK](target: ScTarget) extends Modu
make[ScTarget].fromValue(target)
}
+class BaboonJvmPyModule[F[+_, +_]: Error2: TagKK](target: PyTarget) extends ModuleDef {
+ include(new SharedTranspilerJvmModule[F]())
+ include(new BaboonCommonPyModule[F]())
+ make[PyTarget].fromValue(target)
+}
+
class BaboonModuleJvm[F[+_, +_]: Error2: MaybeSuspend2: TagKK](
options: CompilerOptions,
parallelAccumulatingOps2: ParallelErrorAccumulatingOps2[F],
diff --git a/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/CLIOptions.scala b/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/CLIOptions.scala
index 53fda68d..8d865611 100644
--- a/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/CLIOptions.scala
+++ b/baboon-compiler/.jvm/src/main/scala/io/septimalmind/baboon/CLIOptions.scala
@@ -70,6 +70,25 @@ case class ScCLIOptions(
scWrappedAdtBranchCodecs: Option[Boolean],
) extends SharedCLIOptions
+case class PyCLIOptions(
+ @Recurse
+ generic: GenericTranspilerCLIOptions,
+ @HelpMessage("Allow to erase target directory even if files with these extensions exist there. Default: cs,json,meta")
+ extAllowCleanup: List[String],
+ @HelpMessage("Adds evolution metadata as a Python dictionary")
+ pyWriteEvolutionDict: Option[Boolean],
+ @HelpMessage("Every ADT branch will encode ADT metadata and expect it in the decoder")
+ pyWrappedAdtBranchCodecs: Option[Boolean],
+ @HelpMessage("Generate JSON codecs")
+ generateJsonCodecs: Option[Boolean],
+ @HelpMessage("Generate UEBA codecs")
+ generateUebaCodecs: Option[Boolean],
+ @HelpMessage("Generate JSON codecs even for types without derived[json]")
+ generateJsonCodecsByDefault: Option[Boolean],
+ @HelpMessage("Generate UEBA codecs even for types without derived[ueba]")
+ generateUebaCodecsByDefault: Option[Boolean],
+) extends SharedCLIOptions
+
case class CLIOptions(
@HelpMessage("A list of *.baboon files to process (can be combined with --model-dir)")
model: List[String],
diff --git a/baboon-compiler/src/main/resources/META-INF/native-image/resource-config.json b/baboon-compiler/src/main/resources/META-INF/native-image/resource-config.json
index 4420bcac..ea665662 100644
--- a/baboon-compiler/src/main/resources/META-INF/native-image/resource-config.json
+++ b/baboon-compiler/src/main/resources/META-INF/native-image/resource-config.json
@@ -1,6 +1,7 @@
{
"resources": [
{"pattern": "baboon-runtime/cs/.*\\.cs"},
- {"pattern": "baboon-runtime/scala/.*\\.scala"}
+ {"pattern": "baboon-runtime/scala/.*\\.scala"},
+ {"pattern": "baboon-runtime/python/.*\\.py"}
]
}
\ No newline at end of file
diff --git a/baboon-compiler/src/main/resources/baboon-runtime/python/baboon_codecs.py b/baboon-compiler/src/main/resources/baboon-runtime/python/baboon_codecs.py
new file mode 100644
index 00000000..f3ebef6b
--- /dev/null
+++ b/baboon-compiler/src/main/resources/baboon-runtime/python/baboon_codecs.py
@@ -0,0 +1,336 @@
+from datetime import datetime, timedelta, timezone
+from abc import ABC, abstractmethod
+from typing import TypeVar, Generic
+from decimal import Decimal
+from io import BytesIO
+from uuid import UUID
+import struct
+
+from pydantic import BaseModel
+
+T = TypeVar("T")
+
+class IBaboonCodecData(ABC):
+ @abstractmethod
+ def baboon_domain_version(self) -> str:
+ raise NotImplementedError
+
+ @abstractmethod
+ def baboon_domain_identifier(self) -> str:
+ raise NotImplementedError
+
+ @abstractmethod
+ def baboon_type_identifier(self) -> str:
+ raise NotImplementedError
+
+class BaboonJsonCodec(IBaboonCodecData, Generic[T]):
+ @abstractmethod
+ def encode(self, obj: T) -> str:
+ raise NotImplementedError
+
+ @abstractmethod
+ def decode(self, json_str: str) -> T:
+ raise NotImplementedError
+
+class BaboonUEBACodec(IBaboonCodecData, Generic[T]):
+ @abstractmethod
+ def encode(self, ctx: 'BaboonCodecContext', wire: 'LEDataOutputStream', obj: T):
+ raise NotImplementedError
+
+ @abstractmethod
+ def decode(self, ctx: 'BaboonCodecContext', wire: 'LEDataInputStream') -> T:
+ raise NotImplementedError
+
+class BaboonCodecContext:
+ def __init__(self, use_indices: bool):
+ self.use_indices = use_indices
+
+ @classmethod
+ def indexed(cls):
+ return cls(True)
+
+ @classmethod
+ def compact(cls):
+ return cls(False)
+
+ @classmethod
+ def default(cls):
+ return cls(False)
+
+class BaboonIndexEntry(BaseModel):
+ offset: int
+ length: int
+
+class BaboonBinCodecIndexed(ABC):
+ @abstractmethod
+ def index_elements_count(self, ctx: BaboonCodecContext) -> int: ...
+
+ def read_index(self, ctx: BaboonCodecContext, wire: 'LEDataInputStream') -> list[BaboonIndexEntry]:
+ header = wire.read_byte()
+ is_indexed = (header & 0b00000001) != 0
+ result: list[BaboonIndexEntry] = []
+
+ prev_offset = 0
+ prev_len = 0
+
+ if is_indexed:
+ left = self.index_elements_count(ctx)
+ while left > 0:
+ offset = wire.read_u32()
+ length = wire.read_u32()
+
+ assert length > 0, "Length must be positive"
+ assert offset >= prev_offset + prev_len, f"Offset violation: {offset} < {prev_offset + prev_len}"
+
+ result.append(BaboonIndexEntry(offset=offset, length=length))
+ left -= 1
+ prev_offset = offset
+ prev_len = length
+
+ return result
+
+CS_EPOCH_DIFF_MS = 62_135_596_800_000
+
+class LEDataOutputStream:
+ def __init__(self, stream: BytesIO):
+ self.stream = stream
+
+ def write(self, b: bytes):
+ self.stream.write(b)
+
+ def write_byte(self, b: int):
+ self.stream.write(struct.pack(" 28:
+ f = f.quantize(Decimal("1.0000000000000000000000000000"))
+
+ sign, digits_tuple, exponent = f.as_tuple()
+ mantissa = 0
+ for d in digits_tuple:
+ mantissa = mantissa * 10 + d
+
+ scale = 0
+ if exponent < 0:
+ scale = -exponent
+ elif exponent > 0:
+ mantissa *= (10 ** exponent)
+ scale = 0
+
+ if mantissa >= (1 << 96):
+ raise ValueError(f"Decimal value {f} is too large for C# Decimal (96-bit limit).")
+
+ flags = (scale << 16)
+ if sign == 1:
+ flags |= 0x80000000
+
+ lo = mantissa & 0xFFFFFFFF
+ mid = (mantissa >> 32) & 0xFFFFFFFF
+ hi = (mantissa >> 64) & 0xFFFFFFFF
+
+ self.write_u32(lo)
+ self.write_u32(mid)
+ self.write_u32(hi)
+ self.write_u32(flags)
+
+ def write_bool(self, b: bool):
+ self.stream.write(struct.pack("?", b))
+
+ def write_uuid(self, u: UUID):
+ self.stream.write(u.bytes_le)
+
+ def write_str(self, s: str):
+ bytes_data = s.encode("utf-8")
+ value = len(bytes_data)
+ while True:
+ current_byte = value & 0x7F
+ value >>= 7
+ if value != 0:
+ current_byte |= 0x80
+ self.write_byte(current_byte)
+ if value == 0:
+ break
+ self.stream.write(bytes_data)
+
+ def write_datetime(self, d: datetime):
+ if d.tzinfo is None:
+ d = d.replace(tzinfo=timezone.utc)
+
+ off = d.utcoffset()
+ offset_ms = 0
+ if off:
+ offset_ms = (off.days * 86400 + off.seconds) * 1000 + off.microseconds // 1000
+
+ unix_epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
+ delta = d - unix_epoch
+
+ unix_utc_ms = (
+ (delta.days * 86_400 * 1000) +
+ (delta.seconds * 1000) +
+ (delta.microseconds // 1000)
+ )
+
+ cs_utc_millis_0001 = unix_utc_ms + CS_EPOCH_DIFF_MS
+ cs_local_millis_0001 = cs_utc_millis_0001 + offset_ms
+
+ self.write_i64(cs_local_millis_0001)
+ self.write_i64(offset_ms)
+
+ kind = 1 if offset_ms == 0 else 2
+ self.write_byte(kind)
+
+ def write_bytes(self, b: bytes):
+ self.write_u32(len(b))
+ self.stream.write(b)
+
+ def write_optional(self, o, f):
+ if o is None:
+ self.write_byte(0)
+ else:
+ self.write_byte(1)
+ f(o)
+
+ def write_seq(self, c, f):
+ self.write_i32(len(c))
+ for i in c:
+ f(i)
+
+ def write_dict(self, d, kf, vf):
+ self.write_i32(len(d))
+ for k,v in d.items():
+ kf(k)
+ vf(v)
+
+class LEDataInputStream:
+ def __init__(self, stream : BytesIO):
+ self.stream = stream
+
+ def read_byte(self) -> int:
+ return struct.unpack(" int:
+ return struct.unpack(" int:
+ return struct.unpack(" int:
+ return struct.unpack(" int:
+ return struct.unpack(" int:
+ return struct.unpack(" int:
+ return struct.unpack(" int:
+ return struct.unpack(" float:
+ return struct.unpack(" float:
+ return struct.unpack(" Decimal:
+ lo = self.read_u32()
+ mid = self.read_u32()
+ hi = self.read_u32()
+ flags = self.read_u32()
+
+ # combine into 96-bit integer
+ mantissa = lo | (mid << 32) | (hi << 64)
+
+ scale = (flags >> 16) & 0xFF
+ sign = (flags >> 31) & 1
+
+ value = Decimal(mantissa) / (Decimal(10) ** scale)
+ return -value if sign else value
+
+ def read_bool(self) -> bool:
+ return struct.unpack("?", self.stream.read(1))[0]
+
+ def read_uuid(self) -> UUID:
+ return UUID(bytes_le = self.stream.read(16))
+
+ def read_str(self) -> str:
+ length = 0
+ shift = 0
+ while True:
+ byte_read = self.read_byte() & 0xFF
+ length |= (byte_read & 0x7F) << shift
+ if (byte_read & 0x80) == 0:
+ break
+ shift += 7
+ buffer = self.stream.read(length)
+ return buffer.decode("utf-8")
+
+ def read_datetime(self) -> datetime:
+ cs_local_millis_0001 = self.read_i64()
+ offset_millis = self.read_i64()
+ kind = self.read_byte()
+
+ cs_utc_millis_0001 = cs_local_millis_0001 - offset_millis
+ unix_utc_millis = cs_utc_millis_0001 - CS_EPOCH_DIFF_MS
+ tz = timezone(timedelta(milliseconds=offset_millis))
+ unix_epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
+ dt_utc = unix_epoch + timedelta(milliseconds=unix_utc_millis)
+
+ return dt_utc.astimezone(tz)
+
+ def read_bytes(self) -> bytes:
+ length = self.read_u32()
+ return self.stream.read(length)
+
+class AbstractBaboonCodecs:
+ def __init__(self):
+ self._codecs = {}
+
+ def register(self, codec_id: str, impl: IBaboonCodecData):
+ self._codecs[codec_id] = impl
+
+ def find(self, codec_id: str) -> IBaboonCodecData:
+ return self._codecs[codec_id]
+
+ def try_find(self, codec_id: str) -> tuple[bool, object | None]:
+ value = self._codecs.get(codec_id)
+ if value is not None:
+ return True, value
+ else:
+ return False, None
+
+class AbstractBaboonJsonCodecs(AbstractBaboonCodecs):
+ pass
+
+class AbstractBaboonUebaCodecs(AbstractBaboonCodecs):
+ pass
\ No newline at end of file
diff --git a/baboon-compiler/src/main/resources/baboon-runtime/python/baboon_runtime_shared.py b/baboon-compiler/src/main/resources/baboon-runtime/python/baboon_runtime_shared.py
new file mode 100644
index 00000000..21d3df01
--- /dev/null
+++ b/baboon-compiler/src/main/resources/baboon-runtime/python/baboon_runtime_shared.py
@@ -0,0 +1,203 @@
+
+
+from datetime import datetime, timedelta, timezone
+from typing import TypeVar, Generic, ClassVar
+from abc import ABC, abstractmethod
+from pydantic import BaseModel, ConfigDict
+from uuid import UUID, uuid4
+from decimal import Decimal
+from functools import wraps
+import struct
+import warnings
+import random
+import string
+
+T = TypeVar("T")
+To = TypeVar("To")
+From = TypeVar("From")
+
+
+class BaboonMeta:
+ pass
+
+
+class IBaboonGenerated:
+ pass
+
+
+class IBaboonGeneratedLatest:
+ pass
+
+
+class IBaboonAdtMemberMeta:
+ pass
+
+
+def deprecated(message):
+ def decorator(cls):
+ original_init = cls.__init__
+
+ @wraps(original_init)
+ def new_init(self, *args, **kwargs):
+ warnings.warn(
+ message,
+ DeprecationWarning,
+ stacklevel=2
+ )
+ original_init(self, *args, **kwargs)
+
+ cls.__init__ = new_init
+ return cls
+
+ return decorator
+
+class Fixture:
+ @staticmethod
+ def next_byte() -> int:
+ return random.randint(-100, 100)
+
+ @staticmethod
+ def next_ubyte() -> int:
+ return random.randint(0, 200)
+
+ @staticmethod
+ def next_i16() -> int:
+ return random.randint(-500, 500)
+
+ @staticmethod
+ def next_u16() -> int:
+ return random.randint(0, 1000)
+
+ @staticmethod
+ def next_i32() -> int:
+ return random.randint(-16384, 16384)
+
+ @staticmethod
+ def next_u32(u=16384) -> int:
+ return random.randint(0, u)
+
+ @staticmethod
+ def next_i64() -> int:
+ return random.randint(-32768, 32768)
+
+ @staticmethod
+ def next_u64() -> int:
+ return random.randint(0, 32768)
+
+ @staticmethod
+ def next_f32() -> float:
+ val = random.uniform(-16384.0, 16384.0)
+ return struct.unpack(' float:
+ return random.uniform(-16384.0, 16384.0)
+
+ @staticmethod
+ def next_f128() -> Decimal:
+ value = random.uniform(0, 1)
+ return Decimal(str(round(value, 10)))
+
+ @staticmethod
+ def next_bool() -> bool:
+ return random.choice([True, False])
+
+ @staticmethod
+ def next_string() -> str:
+ return ''.join(random.choices(string.ascii_letters + string.digits, k=20))
+
+ @staticmethod
+ def next_datetime() -> datetime:
+ start = datetime(1970, 1, 1, tzinfo=timezone.utc)
+ end = datetime(2100, 1, 1, tzinfo=timezone.utc)
+
+ delta = end - start
+ random_seconds = random.randrange(int(delta.total_seconds()))
+ return start + timedelta(seconds=random_seconds)
+
+ @staticmethod
+ def next_uuid() -> UUID:
+ return uuid4()
+
+ @staticmethod
+ def next_bytes() -> bytes:
+ return bytes([random.randint(0, 255) for _ in range(16)])
+
+ @staticmethod
+ def next_list(f):
+ return [f() for _ in range(10)]
+
+ @staticmethod
+ def next_set(f):
+ return {f() for _ in range(10)}
+
+ @staticmethod
+ def next_dict(kf, vf):
+ return {kf(): vf() for _ in range(10)}
+
+ @staticmethod
+ def next_optional(vf):
+ return vf()
+
+ @staticmethod
+ def next_random_enum(e):
+ return random.choice(list(e))
+
+ @staticmethod
+ def oneof(l: list[T]) -> T:
+ return random.choice(l)
+
+
+class ConversionKey(BaseModel):
+ from_t: type
+ to: type
+
+ model_config = ConfigDict(
+ frozen=True
+ )
+
+
+class GenericConversion(ABC):
+ pass
+
+
+class BaboonAbstractConversions(GenericConversion):
+ def __init__(self):
+ self.registry = {}
+
+ def register(self, conversion, type_from: type, to_type: type):
+ key = ConversionKey(from_t=type_from, to=to_type)
+ self.registry[key] = conversion
+
+ def convert_with_context(self, context, from_value, type_from, to_type):
+ key = ConversionKey(from_t=type_from, to=to_type)
+ return self.registry[key].do_convert(context, self, from_value)
+
+ @abstractmethod
+ def versions_from(self) -> list[str]:
+ raise NotImplementedError
+
+ @abstractmethod
+ def version_to(self) -> str:
+ raise NotImplementedError
+
+
+class BaboonAbstractConversion(ABC, Generic[From, To]):
+ @abstractmethod
+ def do_convert(self, ctx, conversions: BaboonAbstractConversions, cfrom: From) -> To:
+ raise NotImplementedError
+
+ @staticmethod
+ @abstractmethod
+ def version_from() -> str:
+ raise NotImplementedError
+
+ @staticmethod
+ @abstractmethod
+ def version_to() -> str:
+ raise NotImplementedError
+
+ @staticmethod
+ @abstractmethod
+ def type_id() -> type:
+ raise NotImplementedError
\ No newline at end of file
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/BaboonModule.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/BaboonModule.scala
index 12b180b2..063f7180 100644
--- a/baboon-compiler/src/main/scala/io/septimalmind/baboon/BaboonModule.scala
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/BaboonModule.scala
@@ -5,6 +5,8 @@ import io.septimalmind.baboon.parser.BaboonParser
import io.septimalmind.baboon.translator.BaboonAbstractTranslator
import io.septimalmind.baboon.translator.csharp.*
import io.septimalmind.baboon.translator.csharp.CSCodecFixtureTranslator.CSRandomMethodTranslatorImpl
+import io.septimalmind.baboon.translator.python.*
+import io.septimalmind.baboon.translator.python.PyDefnTranslator.PyDefnTranslatorImpl
import io.septimalmind.baboon.translator.scl.*
import io.septimalmind.baboon.typer.*
import io.septimalmind.baboon.typer.model.*
@@ -47,12 +49,12 @@ class BaboonModuleLogicModule[F[+_, +_]: Error2: MaybeSuspend2: TagKK](
make[BaboonRuntimeCodec[F]].from[BaboonRuntimeCodec.BaboonRuntimeCodecImpl[F]]
}
-class SharedTranspilerModule[F[+_, +_]: Error2: TagKK]() extends ModuleDef {
+class SharedTranspilerModule[F[+_, +_]: Error2: TagKK] extends ModuleDef {
include(new BaboonSharedModule[F])
make[BaboonMetagen].from[BaboonMetagen.BaboonMetagenImpl]
}
-class BaboonCommonCSModule[F[+_, +_]: Error2: TagKK]() extends ModuleDef {
+class BaboonCommonCSModule[F[+_, +_]: Error2: TagKK] extends ModuleDef {
include(new SharedTranspilerModule[F])
makeSubcontext[CSDefnTranslator[F]]
@@ -81,7 +83,7 @@ class BaboonCommonCSModule[F[+_, +_]: Error2: TagKK]() extends ModuleDef {
.ref[CSBaboonTranslator[F]]
}
-class BaboonCommonScModule[F[+_, +_]: Error2: TagKK]() extends ModuleDef {
+class BaboonCommonScModule[F[+_, +_]: Error2: TagKK] extends ModuleDef {
include(new SharedTranspilerModule[F])
makeSubcontext[ScDefnTranslator[F]]
@@ -106,3 +108,29 @@ class BaboonCommonScModule[F[+_, +_]: Error2: TagKK]() extends ModuleDef {
many[BaboonAbstractTranslator[F]]
.ref[ScBaboonTranslator[F]]
}
+
+class BaboonCommonPyModule[F[+_, +_]: Error2: TagKK] extends ModuleDef {
+ include(new SharedTranspilerModule[F])
+
+ makeSubcontext[PyDefnTranslator[F]]
+ .localDependencies(List(DIKey[Domain], DIKey[BaboonEvolution]))
+ .withSubmodule(new ModuleDef {
+ make[PyDefnTranslator[F]].from[PyDefnTranslatorImpl[F]]
+ make[PyDomainTreeTools].from[PyDomainTreeTools.PyDomainTreeToolsImpl]
+ make[PyCodecFixtureTranslator].from[PyCodecFixtureTranslator.PyCodecFixtureTranslatorImpl]
+ make[PyCodecTestTranslator].from[PyCodecTestTranslator.PyCodecTestTranslatorImpl]
+ make[PyJsonCodecGenerator]
+ many[PyCodecTranslator]
+ .add[PyJsonCodecGenerator]
+ .add[PyUEBACodecGenerator]
+ })
+
+ make[PyFileTools].from[PyFileTools.ScFileToolsImpl]
+
+ make[PyTypeTranslator]
+ makeFactory[PyConversionTranslator.Factory[F]]
+
+ make[PyBaboonTranslator[F]].aliased[BaboonAbstractTranslator[F]]
+ many[BaboonAbstractTranslator[F]]
+ .ref[PyBaboonTranslator[F]]
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/CompilerOptions.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/CompilerOptions.scala
index 06105558..274c989d 100644
--- a/baboon-compiler/src/main/scala/io/septimalmind/baboon/CompilerOptions.scala
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/CompilerOptions.scala
@@ -24,8 +24,24 @@ object CompilerTarget {
generic: GenericOptions,
language: ScOptions,
) extends CompilerTarget
+
+ case class PyTarget(
+ id: String,
+ output: OutputOptions,
+ generic: GenericOptions,
+ language: PyOptions,
+ ) extends CompilerTarget
}
+final case class PyOptions(
+ writeEvolutionDict: Boolean,
+ wrappedAdtBranchCodecs: Boolean,
+ generateJsonCodecs: Boolean,
+ generateUebaCodecs: Boolean,
+ generateUebaCodecsByDefault: Boolean,
+ generateJsonCodecsByDefault: Boolean,
+)
+
final case class ScOptions(
writeEvolutionDict: Boolean,
wrappedAdtBranchCodecs: Boolean,
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyBaboonTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyBaboonTranslator.scala
new file mode 100644
index 00000000..18108386
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyBaboonTranslator.scala
@@ -0,0 +1,309 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.CompilerProduct
+import io.septimalmind.baboon.CompilerTarget.PyTarget
+import io.septimalmind.baboon.parser.model.issues.{BaboonIssue, TranslationIssue}
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.translator.python.PyValue.PyModuleId
+import io.septimalmind.baboon.translator.{BaboonAbstractTranslator, OutputFile, Sources}
+import io.septimalmind.baboon.typer.model.{BaboonFamily, BaboonLineage, Domain, DomainMember, EvolutionStep}
+import izumi.distage.Subcontext
+import izumi.functional.bio.{Error2, F}
+import izumi.fundamentals.collections.nonempty.NEList
+import izumi.fundamentals.platform.resources.IzResources
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.collections.IzCollections.*
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+class PyBaboonTranslator[F[+_, +_]: Error2](
+ translator: Subcontext[PyDefnTranslator[F]],
+ convTransFactory: PyConversionTranslator.Factory[F],
+ target: PyTarget,
+ pyFileTools: PyFileTools,
+ typeTranslator: PyTypeTranslator,
+) extends BaboonAbstractTranslator[F] {
+
+ type Out[T] = F[NEList[BaboonIssue], T]
+
+ override def translate(family: BaboonFamily): Out[Sources] = {
+ for {
+ translated <- translateFamily(family)
+ runtime <- sharedRuntime()
+ rendered = (translated ++ runtime).map {
+ o =>
+ val content = renderTree(o)
+ (o.path, OutputFile(content, o.product))
+ }
+ unique <- F.fromEither(rendered.toUniqueMap(c => BaboonIssue.of(TranslationIssue.NonUniqueOutputFiles(c))))
+ } yield Sources(unique)
+ }
+
+ private def translateFamily(
+ family: BaboonFamily
+ ): Out[List[PyDefnTranslator.Output]] = {
+ F.flatSequenceAccumErrors {
+ family.domains.iterator.map { case (_, lineage) => translateLineage(lineage) }.toList
+ }
+ }
+
+ private def translateLineage(
+ lineage: BaboonLineage
+ ): Out[List[PyDefnTranslator.Output]] = {
+ F.flatSequenceAccumErrors {
+ lineage.versions.iterator.map { case (_, domain) => translateDomain(domain, lineage) }.toList
+ }
+ }
+
+ private def translateDomain(domain: Domain, lineage: BaboonLineage): Out[List[PyDefnTranslator.Output]] = {
+ val evo = lineage.evolution
+ translator.provide(domain).provide(evo).produce().use {
+ defnTranslator =>
+ for {
+ defnSources <- translateProduct(domain, CompilerProduct.Definition, defnTranslator.translate)
+ fixturesSources <- translateProduct(domain, CompilerProduct.Fixture, defnTranslator.translateFixtures)
+ testsSources <- translateProduct(domain, CompilerProduct.Test, defnTranslator.translateTests)
+ initPyFile = if (domain.version == lineage.evolution.latest) Nil else List(genInitPy(defnSources, domain))
+
+ conversionSources <- {
+ if (target.output.products.contains(CompilerProduct.Conversion)) {
+ val evosToCurrent = evo.diffs.keySet.filter(_.to == domain.version)
+ generateConversions(domain, lineage, evosToCurrent, defnSources)
+ } else {
+ F.pure(List.empty)
+ }
+ }
+ meta <- {
+ if (target.language.writeEvolutionDict) {
+ generateMeta(domain, lineage)
+ } else {
+ F.pure(List.empty)
+ }
+ }
+ } yield {
+ defnSources ++
+ conversionSources ++
+ fixturesSources ++
+ testsSources ++
+ meta ++
+ initPyFile
+ }
+ }
+ }
+
+ private def genInitPy(definitions: List[PyDefnTranslator.Output], domain: Domain): PyDefnTranslator.Output = {
+ val modules = definitions.map(o => q"${o.module.module}")
+ val importTree = q"""from . import (
+ | ${modules.join(",\n").shift(4)}
+ |)""".stripMargin
+ val initPyModule = typeTranslator.toPyModule(domain.id)
+ val versionStr = domain.version.format(prefix = "v", delimiter = "_")
+ val fileName = "__init__.py"
+ val path = initPyModule.path ++ List(versionStr, fileName)
+ PyDefnTranslator.Output(
+ path.mkString("/"),
+ importTree,
+ initPyModule,
+ CompilerProduct.Definition,
+ )
+ }
+
+ private def sharedRuntime(): Out[List[PyDefnTranslator.Output]] = {
+ def rt(path: String, resource: String): PyDefnTranslator.Output = {
+ PyDefnTranslator.Output(
+ path,
+ TextTree.text(IzResources.readAsString(resource).get),
+ pyBaboonSharedRuntimeModule,
+ CompilerProduct.Runtime,
+ )
+ }
+
+ if (target.output.products.contains(CompilerProduct.Runtime)) {
+ F.pure(
+ List(
+ rt("baboon_runtime_shared.py", "baboon-runtime/python/baboon_runtime_shared.py"),
+ rt("baboon_codecs.py", "baboon-runtime/python/baboon_codecs.py"),
+ )
+ )
+ } else F.pure(Nil)
+ }
+
+
+ private def renderTree(o: PyDefnTranslator.Output): String = {
+ val usedTypes = o.tree.values.collect { case t: PyValue.PyType => t }
+ .filterNot(_.moduleId == pyBuiltins)
+ .filterNot(_.moduleId == o.module)
+ .distinct
+
+ val (versioned, usual) = usedTypes.partition(_.versioned)
+
+ val versionPkgImports = versioned
+ .map(t => t.moduleId.pathToVersion -> t.moduleId).toMap
+ .map {
+ case (path, module) =>
+ q"from ${path.mkString(".")} import ${module.moduleVersionString.getOrElse("")}"
+ }.toList
+
+ val usualImportsByModule = usual.groupBy(_.moduleId).toList
+ .sortBy { case (moduleId, types) => moduleId.path.size + types.size }.reverse.map {
+ case (module, types) =>
+ if (module == pyBaboonCodecsModule || module == pyBaboonSharedRuntimeModule) {
+ val baseString = pyFileTools.definitionsBasePkg.mkString(".")
+ val typesString = types.map(_.name).mkString(", ")
+ q"from $baseString.${module.module} import $typesString"
+ } else {
+ val typesString = types.map(_.name).mkString(", ")
+ q"from ${module.path.mkString(".")} import $typesString"
+ }
+ }
+
+ val allImports = (usualImportsByModule ++ versionPkgImports).joinN()
+
+ val full = Seq(allImports, o.tree).joinNN()
+
+ full.mapRender {
+ case t: PyValue.PyType => t.name
+ }
+ }
+
+ private def generateConversions(
+ domain: Domain,
+ lineage: BaboonLineage,
+ toCurrent: Set[EvolutionStep],
+ defnOut: List[PyDefnTranslator.Output],
+ ): Out[List[PyDefnTranslator.Output]] = {
+ val module = typeTranslator.toPyModule(domain.id)
+
+ for {
+ conversions <- F.flatSequenceAccumErrors {
+ lineage.evolution.rules
+ .filter(kv => toCurrent.contains(kv._1))
+ .map {
+ case (srcVer, rules) =>
+ convTransFactory(
+ srcDom = lineage.versions(srcVer.from),
+ domain = domain,
+ rules = rules,
+ evolution = lineage.evolution,
+ ).makeConversions
+ }
+ }
+ } yield {
+ val conversionRegs = conversions.map(_.register).toList
+ val missing = conversions.flatMap(_.missing.iterator.toSeq).toSeq
+
+ val missingTree = if (missing.isEmpty) q"pass" else missing.join("\n")
+
+ val converter =
+ q"""class RequiredConversions($pyABC):
+ | ${missingTree.shift(4).trim}
+ |
+ |class BaboonConversions($baboonAbstractConversions):
+ | def __init__(self, required: RequiredConversions):
+ | super().__init__()
+ | self.required = required
+ | ${conversionRegs.join("\n").shift(8).trim}
+ |
+ | def versions_from(self) -> $pyList[$pyStr]:
+ | return [${toCurrent.map(_.from.v.toString).map(v => s"\"$v\"").mkString(", ")}]
+ |
+ | def version_to(self) -> $pyStr:
+ | return "${domain.version.v.toString}"
+ |""".stripMargin
+
+ val regsMap = defnOut.flatMap(_.codecReg).toMultimap.view.mapValues(_.flatten).toMap
+ val codecs = regsMap.map {
+ case (codecid, regs) =>
+ val nme = q"BaboonCodecs${codecid.capitalize}"
+ q"""class $nme (${abstractBaboonCodecs(codecid)}):
+ | def __init__(self):
+ | super().__init__()
+ | ${regs.toList.map(r => q"self.register($r)").join("\n").shift(8).trim}
+ |
+ | @$pyClassMethod
+ | @$pyCache
+ | def instance (cls):
+ | return cls()
+ |""".stripMargin
+ }.toList.join("\n\n")
+
+ val basename = pyFileTools.basename(domain, lineage.evolution)
+
+ val runtimeSource = Seq(converter, codecs).join("\n\n")
+ val runtimeOutput = PyDefnTranslator.Output(
+ s"$basename/baboon_runtime.py",
+ runtimeSource,
+ module,
+ CompilerProduct.Conversion,
+ )
+
+ val convertersOutput = conversions.map {
+ conv =>
+ PyDefnTranslator.Output(
+ s"$basename/${conv.fileName}",
+ conv.conversionTree,
+ module,
+ CompilerProduct.Conversion,
+ )
+ }
+ List(runtimeOutput) ++ convertersOutput
+ }
+ }
+
+ private def generateMeta(domain: Domain, lineage: BaboonLineage): Out[List[PyDefnTranslator.Output]] = {
+ val basename = pyFileTools.basename(domain, lineage.evolution)
+
+ val entries = lineage.evolution
+ .typesUnchangedSince(domain.version)
+ .toList
+ .sortBy(_._1.toString)
+ .map {
+ case (tid, version) =>
+ q""""${tid.toString}": [${version.sameIn.map(_.v.toString).map(s => q"\"$s\"").toList.join(", ")}]"""
+ }
+
+ val metaTree =
+ q"""class BaboonMetadata($baboonMeta):
+ | def __init__(self) -> None:
+ | self.unmodified: dict[str, list[str]] = {
+ | ${entries.join(",\n").shift(12).trim}
+ | }
+ |
+ | def unmodified_since(self, type_id_string: $pyStr) -> $pyList[$pyStr]:
+ | return self.unmodified.get(type_id_string, [])
+ |
+ |""".stripMargin
+
+ val metaOutput = PyDefnTranslator.Output(
+ s"$basename/baboon_metadata.py",
+ metaTree,
+ PyModuleId(NEList.unsafeFrom(domain.id.path.toList :+ "baboon_metadata")),
+ CompilerProduct.Definition,
+ )
+
+ F.pure(List(metaOutput))
+ }
+
+ private def translateProduct(
+ domain: Domain,
+ p: CompilerProduct,
+ translate: DomainMember.User => F[NEList[BaboonIssue], List[PyDefnTranslator.Output]],
+ ): F[NEList[BaboonIssue], List[PyDefnTranslator.Output]] = {
+ if (target.output.products.contains(p)) {
+ F.flatTraverseAccumErrors(domain.defs.meta.nodes.toList) {
+ case (_, defn: DomainMember.User) => translate(defn)
+ case _ => F.pure(List.empty)
+ }
+ } else {
+ F.pure(List.empty)
+ }
+ }
+}
+
+object PyBaboonTranslator {
+ case class RenderedConversion(
+ fileName: String,
+ conversionTree: TextTree[PyValue],
+ register: TextTree[PyValue],
+ missing: Option[TextTree[PyValue]],
+ )
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecFixtureTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecFixtureTranslator.scala
new file mode 100644
index 00000000..0953f71e
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecFixtureTranslator.scala
@@ -0,0 +1,129 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.translator.python.PyTypes.{baboonFixture, pyList, pyStaticMethod}
+import io.septimalmind.baboon.translator.python.PyValue.PyType
+import io.septimalmind.baboon.typer.BaboonEnquiries
+import io.septimalmind.baboon.typer.model.{BaboonEvolution, Domain, DomainMember, TypeId, TypeRef, Typedef}
+import io.septimalmind.baboon.typer.model.TypeId.Builtins
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+trait PyCodecFixtureTranslator {
+ def translate(defn: DomainMember.User): Option[TextTree[PyValue]]
+ def fixtureType(tid: TypeId.User): PyType
+}
+
+object PyCodecFixtureTranslator {
+ final class PyCodecFixtureTranslatorImpl(
+ typeTranslator: PyTypeTranslator,
+ enquiries: BaboonEnquiries,
+ evolution: BaboonEvolution,
+ pyFileTools: PyFileTools,
+ domain: Domain,
+ ) extends PyCodecFixtureTranslator {
+ override def translate(defn: DomainMember.User): Option[TextTree[PyValue]] = {
+ defn.defn match {
+ case _ if enquiries.hasForeignType(defn, domain) => None
+ case _ if enquiries.isRecursiveTypedef(defn, domain) => None
+ case _: Typedef.Contract => None
+ case _: Typedef.Enum => None
+ case _: Typedef.Foreign => None
+ case _: Typedef.Service => None
+ case dto: Typedef.Dto => Some(doTranslateDto(dto))
+ case adt: Typedef.Adt => Some(doTranslateAdt(adt))
+ }
+ }
+
+ private def doTranslateDto(dto: Typedef.Dto): TextTree[PyValue] = {
+ val generatedFields = dto.fields.map(f => q"${f.name.name}=${genType(f.tpe)}")
+ val dtoType = typeTranslator
+ .asPyType(dto.id, domain, evolution, pkgBase = pyFileTools.definitionsBasePkg)
+
+ q"""class ${dto.id.name.name.capitalize}_Fixture:
+ | @$pyStaticMethod
+ | def random() -> $dtoType:
+ | return $dtoType(
+ | ${generatedFields.join(",\n").shift(12).trim}
+ | )
+ |""".stripMargin
+ }
+
+ private def doTranslateAdt(adt: Typedef.Adt): TextTree[PyValue] = {
+ val members = adt.members.toList
+ .flatMap(m => domain.defs.meta.nodes.get(m))
+ .collect { case DomainMember.User(_, d: Typedef.Dto, _, _) => d }
+
+ val membersFixtures = members.sortBy(_.id.toString).map(dto => doTranslateDto(dto))
+ val membersGenerators = members.sortBy(_.id.toString).map(dto => q"${dto.id.name.name}_Fixture.random()")
+
+ val adtType = typeTranslator
+ .asPyType(adt.id, domain, evolution, pkgBase = pyFileTools.definitionsBasePkg)
+
+ q"""class ${adt.id.name.name}_Fixture:
+ | @$pyStaticMethod
+ | def random() -> $adtType:
+ | return $baboonFixture.oneof(${adt.id.name.name}_Fixture.random_all())
+ |
+ | @$pyStaticMethod
+ | def random_all() -> $pyList[$adtType]:
+ | return [
+ | ${membersGenerators.join(",\n").shift(12).trim}
+ | ]
+ |
+ |${membersFixtures.joinN().trim}
+ |""".stripMargin
+ }
+
+ private def genType(tpe: TypeRef): TextTree[PyValue] = {
+ tpe match {
+ case s: TypeRef.Scalar =>
+ s.id match {
+ case TypeId.Builtins.i08 => q"$baboonFixture.next_byte()"
+ case TypeId.Builtins.i16 => q"$baboonFixture.next_i16()"
+ case TypeId.Builtins.i32 => q"$baboonFixture.next_i32()"
+ case TypeId.Builtins.i64 => q"$baboonFixture.next_i64()"
+
+ case TypeId.Builtins.u08 => q"$baboonFixture.next_ubyte()"
+ case TypeId.Builtins.u16 => q"$baboonFixture.next_u16()"
+ case TypeId.Builtins.u32 => q"$baboonFixture.next_u32()"
+ case TypeId.Builtins.u64 => q"$baboonFixture.next_u64()"
+
+ case TypeId.Builtins.f32 => q"$baboonFixture.next_f32()"
+ case TypeId.Builtins.f64 => q"$baboonFixture.next_f64()"
+ case TypeId.Builtins.f128 => q"$baboonFixture.next_f128()"
+
+ case TypeId.Builtins.str => q"$baboonFixture.next_string()"
+ case TypeId.Builtins.uid => q"$baboonFixture.next_uuid()"
+ case TypeId.Builtins.tsu => q"$baboonFixture.next_datetime()"
+ case TypeId.Builtins.tso => q"$baboonFixture.next_datetime()"
+
+ case TypeId.Builtins.bit => q"$baboonFixture.next_bool()"
+
+ case TypeId.Builtins.bytes => q"$baboonFixture.next_bytes()"
+
+ case id: TypeId.User if enquiries.isEnum(tpe, domain) =>
+ val tpe = typeTranslator.asPyType(id, domain, evolution, pyFileTools.definitionsBasePkg)
+ q"$baboonFixture.next_random_enum($tpe)"
+ case u: TypeId.User => q"${fixtureType(u)}.random()"
+ case t => throw new IllegalArgumentException(s"Unexpected scalar type: $t")
+ }
+ case TypeRef.Constructor(id, args) =>
+ id match {
+ case Builtins.lst => q"$baboonFixture.next_list(lambda: ${genType(args.head)})"
+ case Builtins.set => q"$baboonFixture.next_set(lambda: ${genType(args.head)})"
+ case Builtins.map => q"$baboonFixture.next_dict(lambda: ${genType(args.head)}, lambda: ${genType(args.last)})"
+ case Builtins.opt => q"$baboonFixture.next_optional(lambda: ${genType(args.head)})"
+ case t => throw new IllegalArgumentException(s"Unexpected scalar type: $t")
+ }
+ }
+ }
+
+ override def fixtureType(tid: TypeId.User): PyType = {
+ val typeName = s"${tid.name.name}_Fixture"
+ val pyModuleId = typeTranslator
+ .toPyModule(tid, domain.version, evolution, pyFileTools.fixturesBasePkg)
+ .withModuleName(typeName)
+ PyType(pyModuleId, typeName)
+ }
+ }
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecTestTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecTestTranslator.scala
new file mode 100644
index 00000000..8d695ef6
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecTestTranslator.scala
@@ -0,0 +1,155 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.CompilerTarget.PyTarget
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.translator.python.PyValue.PyType
+import io.septimalmind.baboon.typer.BaboonEnquiries
+import io.septimalmind.baboon.typer.model.{BaboonEvolution, Domain, DomainMember, Typedef}
+import io.septimalmind.baboon.util.BLogger
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+trait PyCodecTestTranslator {
+ def translate(defn: DomainMember.User, pyRef: PyType, srcRef: PyType): Option[TextTree[PyValue]]
+}
+
+object PyCodecTestTranslator {
+ final class PyCodecTestTranslatorImpl(
+ fixtureTranslator: PyCodecFixtureTranslator,
+ typeTranslator: PyTypeTranslator,
+ codecs: Set[PyCodecTranslator],
+ enquiries: BaboonEnquiries,
+ evolution: BaboonEvolution,
+ pyFileTools: PyFileTools,
+ pyTarget: PyTarget,
+ logger: BLogger,
+ domain: Domain,
+ ) extends PyCodecTestTranslator {
+ override def translate(
+ defn: DomainMember.User,
+ pyRef: PyType,
+ srcRef: PyType,
+ ): Option[TextTree[PyValue]] = {
+ val isLatestVersion = domain.version == evolution.latest
+
+ defn match {
+ case d if enquiries.hasForeignType(d, domain) => None
+ case d if enquiries.isRecursiveTypedef(d, domain) => None
+ case d if d.defn.isInstanceOf[Typedef.NonDataTypedef] => None
+ case _ if !isLatestVersion => None
+ case _ =>
+ val testClass =
+ q"""class Test_${srcRef.name}_Tests($pyTestCase):
+ | ${makeTest(defn, srcRef)}
+ |""".stripMargin
+ Some(testClass)
+ }
+ }
+
+ private def makeTest(defn: DomainMember.User, srcRef: PyType): TextTree[PyValue] = {
+ val fixture = makeFixture(defn)
+ codecs.map {
+ case jsonCodec: PyJsonCodecGenerator =>
+ val codec = jsonCodec.codecType(defn.id)
+ val body = jsonCodecAssertions(defn)
+ q"""def test_json_codec(self):
+ | for _ in range(${pyTarget.generic.codecTestIterations.toString}):
+ | self.json_codec_test_impl()
+ |
+ |def test_load_json_produced_by_cs_codecs(self):
+ | self.cs_json_test("default")
+ |
+ |def cs_json_test(self, clue):
+ | tpeid = "${defn.id.render}"
+ | with open(f"../target/cs/json-{clue}/{tpeid}.json", encoding="utf-8") as f:
+ | cs_json = f.read()
+ | decoded = $codec.instance().decode(cs_json)
+ | self.json_compare(decoded)
+ |
+ |def json_codec_test_impl(self):
+ | ${fixture.shift(4).trim}
+ | ${body.shift(4).trim}
+ |
+ |def json_compare(self, fixture):
+ | fixtureJson = $codec.instance().encode(fixture)
+ | fixtureDecoded = $codec.instance().decode(fixtureJson)
+ | self.assertEqual(fixture, fixtureDecoded)
+ |""".stripMargin
+ case uebaCodec: PyUEBACodecGenerator =>
+ val codec = uebaCodec.codecType(defn.id)
+ val body = uebaCodecAssertions(defn)
+ q"""def test_ueba_codec(self):
+ | for _ in range(${pyTarget.generic.codecTestIterations.toString}):
+ | self.ueba_codec_test_impl($baboonCodecContext.default())
+ |
+ |def ueba_codec_test_impl(self, context):
+ | ${fixture.shift(4).trim}
+ | ${body.shift(4).trim}
+ |
+ |def test_ueba_produced_by_cs_codecs(self):
+ | self.cs_ueba_test($baboonCodecContext.indexed(), "indexed")
+ | self.cs_ueba_test($baboonCodecContext.compact(), "compact")
+ |
+ |def cs_ueba_test(self, context, clue):
+ | tpeid = "${defn.id.render}"
+ | with open(f"../target/cs/ueba-{clue}/{tpeid}.uebin", "rb") as f:
+ | cs_uebin = f.read()
+ | memory_stream = $pyBytesIO()
+ | input_stream = $baboonLEDataInputStream(memory_stream)
+ | memory_stream.write(cs_uebin)
+ | memory_stream.seek(0)
+ | decoded = $codec.instance().decode(context, input_stream)
+ | self.ueba_compare(context, decoded)
+ |
+ |def ueba_compare(self, context, obj):
+ | memory_stream = $pyBytesIO()
+ | output_stream = $baboonLEDataOutputStream(memory_stream)
+ | $codec.instance().encode(context, output_stream, obj)
+ | memory_stream.seek(0)
+ | input_stream = $baboonLEDataInputStream(memory_stream)
+ | decoded = $codec.instance().decode(context, input_stream)
+ | self.assertEqual(obj, decoded)
+ |
+ |""".stripMargin
+ case unknown =>
+ logger.message(s"Cannot create codec tests (${unknown.id}) for unsupported type $srcRef")
+ q""
+ }.toList.map(_.stripMargin.trim).joinNN().shift(4).trim
+ }
+
+ private def jsonCodecAssertions(defn: DomainMember.User): TextTree[PyValue] = {
+ defn.defn match {
+ case _: Typedef.Adt =>
+ q"""for fixture in fixtures:
+ | self.json_compare(fixture)
+ |""".stripMargin
+ case _ => q"self.json_compare(fixture)"
+ }
+ }
+
+ private def uebaCodecAssertions(defn: DomainMember.User): TextTree[PyValue] = {
+ defn.defn match {
+ case _: Typedef.Adt =>
+ q"""for fixture in fixtures:
+ | self.ueba_compare(context, fixture)
+ |""".stripMargin.trim
+ case _ =>
+ q"self.ueba_compare(context, fixture)"
+ }
+ }
+
+ private def makeFixture(defn: DomainMember.User): TextTree[PyValue] = {
+ defn.defn match {
+ case _: Typedef.Enum =>
+ val enumTpe = typeTranslator.asPyType(defn.id, domain, evolution, pyFileTools.definitionsBasePkg)
+ q"fixture = $baboonFixture.next_random_enum($enumTpe)"
+ case _: Typedef.Adt =>
+ val fixtureType = fixtureTranslator.fixtureType(defn.id)
+ q"fixtures = $fixtureType.random_all()"
+ case _ =>
+ val fixtureType = fixtureTranslator.fixtureType(defn.id)
+ q"fixture = $fixtureType.random()"
+ }
+ }
+ }
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecTranslator.scala
new file mode 100644
index 00000000..e8a41c6a
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyCodecTranslator.scala
@@ -0,0 +1,18 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.translator.python.PyCodecTranslator.CodecMeta
+import io.septimalmind.baboon.translator.python.PyValue.PyType
+import io.septimalmind.baboon.typer.model.{DomainMember, TypeId}
+import izumi.fundamentals.platform.strings.TextTree
+
+trait PyCodecTranslator {
+ def translate(defn: DomainMember.User, pyRef: PyType, srcRef: PyType): Option[TextTree[PyValue]]
+ def codecType(tid: TypeId.User): PyType
+ def codecMeta(tid: TypeId.User): CodecMeta
+ def isActive(id: TypeId): Boolean
+ def id: String
+}
+
+object PyCodecTranslator {
+ case class CodecMeta(member: TextTree[PyValue])
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyConversionTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyConversionTranslator.scala
new file mode 100644
index 00000000..05a262dc
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyConversionTranslator.scala
@@ -0,0 +1,366 @@
+package io.septimalmind.baboon.translator.python
+
+import distage.Id
+import io.septimalmind.baboon.parser.model.issues.{BaboonIssue, TranslationIssue}
+import io.septimalmind.baboon.translator.python.PyBaboonTranslator.RenderedConversion
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.translator.python.PyValue.{PyModuleId, PyType}
+import io.septimalmind.baboon.typer.model.*
+import io.septimalmind.baboon.typer.model.Conversion.FieldOp
+import izumi.functional.bio.{Error2, F}
+import izumi.fundamentals.collections.nonempty.NEList
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+import scala.util.chaining.scalaUtilChainingOps
+
+object PyConversionTranslator {
+ trait Factory[F[+_, +_]] {
+ def apply(
+ srcDom: Domain @Id("source"),
+ domain: Domain @Id("current"),
+ rules: BaboonRuleset,
+ evolution: BaboonEvolution,
+ ): PyConversionTranslator[F]
+ }
+}
+
+final class PyConversionTranslator[F[+_, +_]: Error2](
+ srcDom: Domain @Id("source"),
+ domain: Domain @Id("current"),
+ rules: BaboonRuleset,
+ evolution: BaboonEvolution,
+ typeTranslator: PyTypeTranslator,
+ pyFileTools: PyFileTools,
+) {
+ private val sourceVersion = srcDom.version
+ private val currentVersion = domain.version
+
+ private val sourceVersionString = sourceVersion.format(delimiter = "_")
+ private val currentVersionString = currentVersion.format(prefix = "v", delimiter = "_")
+
+ type Out[T] = F[NEList[BaboonIssue], T]
+
+ def makeConversions: Out[List[RenderedConversion]] = {
+ F.flatTraverseAccumErrors(rules.conversions) {
+ case _: Conversion.RemovedTypeNoConversion => F.pure(Nil)
+ case _: Conversion.NonDataTypeTypeNoConversion => F.pure(Nil)
+ case conversion =>
+ val convType = conversionType(conversion)
+ val fileName = s"${convType.moduleId.module}.py"
+
+ val typeFrom = typeTranslator.asPyTypeVersioned(conversion.sourceTpe, srcDom, evolution, pyFileTools.definitionsBasePkg)
+ def typeTo = typeTranslator.asPyType(conversion.sourceTpe, domain, evolution, pyFileTools.definitionsBasePkg)
+
+ val meta =
+ q"""@$pyStaticMethod
+ |def version_from() -> str:
+ | return "${sourceVersion.v.toString}"
+ |
+ |@$pyStaticMethod
+ |def version_to() -> str:
+ | return "${domain.version.v.toString}"
+ |
+ |@$pyStaticMethod
+ |def type_id() -> str:
+ | return "${conversion.sourceTpe.toString}"
+ |
+ |@$pyClassMethod
+ |@$pyCache
+ |def instance (cls):
+ | return cls()
+ |""".stripMargin
+
+ val convTree = genConversionTree(conversion, typeFrom, typeTo, convType, meta)
+ val registerTree = genRegisterTree(conversion, typeFrom, typeTo, convType)
+ val abstractConversion = genAbstractConversion(conversion, convType)
+
+ convTree
+ .zip(registerTree)
+ .fold(F.pure(List.empty[RenderedConversion])) { case (conv, reg) => F.pure(List(RenderedConversion(fileName, conv, reg, abstractConversion))) }
+ .catchSome {
+ case _ => F.fail(BaboonIssue.of(TranslationIssue.TranslationBug()))
+ }
+ }
+ }
+
+ private def genRegisterTree(
+ conversion: Conversion,
+ typeFrom: PyType,
+ typeTo: PyType,
+ convType: PyType,
+ ): Option[TextTree[PyType]] = {
+ conversion match {
+ case _: Conversion.CustomConversionRequired => Some(q"self.register(required.${convType.name}(), $typeFrom, $typeTo)")
+ case _: Conversion.CopyEnumByName | _: Conversion.CopyAdtBranchByName | _: Conversion.DtoConversion =>
+ Some(q"self.register($convType.instance(), $typeFrom, $typeTo)")
+
+ case _ => None
+ }
+ }
+
+ private def genAbstractConversion(conversion: Conversion, convType: PyType): Option[TextTree[PyType]] = {
+ conversion match {
+ case _: Conversion.CustomConversionRequired =>
+ Some(
+ q"""@$pyAbstractMethod
+ |def ${convType.name}(self):
+ | raise NotImplementedError
+ |""".stripMargin
+ )
+ case _ => None
+ }
+ }
+
+ private def genConversionTree(
+ conversion: Conversion,
+ typeFrom: PyType,
+ typeTo: PyType,
+ convType: PyType,
+ meta: TextTree[PyType],
+ ): Option[TextTree[PyValue]] = {
+ conversion match {
+ case _: Conversion.CopyEnumByName =>
+ Some(q"""class ${convType.name}($pyABC, $baboonAbstractConversion[$typeFrom, $typeTo]):
+ | @$pyOverride
+ | def do_convert(self, ctx, conversions, _from: $typeFrom) -> $typeTo:
+ | return $typeTo[_from.name]
+ |
+ | ${meta.shift(4).trim}
+ |""".stripMargin)
+
+ case c: Conversion.CopyAdtBranchByName =>
+ val memberCases = c.oldDefn
+ .dataMembers(srcDom)
+ .map(tid => tid -> typeTranslator.asPyTypeVersioned(tid, srcDom, evolution, pyFileTools.definitionsBasePkg))
+ .map {
+ case (oldTypeId, oldType) =>
+ q"""case $oldType():
+ | return ${transfer(TypeRef.Scalar(oldTypeId), q"_from")}
+ |""".stripMargin
+ }
+ val defaultCase = q"""case other:
+ | raise ValueError(f"Bad input: {other}")
+ |""".stripMargin
+
+ val cases = memberCases :+ defaultCase
+
+ Some(q"""class ${convType.name}($baboonAbstractConversion[$typeFrom, $typeTo]):
+ | @$pyOverride
+ | def do_convert(self, ctx, conversions: $baboonAbstractConversions, _from: $typeFrom) -> $typeTo:
+ | match _from:
+ | ${cases.joinN().shift(12).trim}
+ |
+ | ${meta.shift(4).trim}
+ |""".stripMargin)
+
+ case c: Conversion.DtoConversion =>
+ val dtoDefn = domain.defs.meta.nodes(c.sourceTpe) match {
+ case DomainMember.User(_, d: Typedef.Dto, _, _) => d
+ case _ => throw new IllegalStateException("DTO expected")
+ }
+ val ops = c.ops.map(o => o.targetField -> o).toMap
+ val assigns = dtoDefn.fields.map {
+ field =>
+ val op = ops(field)
+ val fieldName = field.name.name
+ val fieldRef = q"_from.$fieldName"
+ val expr = op match {
+ case o: FieldOp.Transfer => transfer(o.targetField.tpe, fieldRef)
+
+ case o: FieldOp.InitializeWithDefault =>
+ o.targetField.tpe match {
+ case TypeRef.Constructor(id, args) =>
+ id match {
+ case TypeId.Builtins.lst =>
+ q"$pyList[${typeTranslator.asPyRef(args.head, domain, evolution)}]()"
+ case TypeId.Builtins.set =>
+ q"$pySet[${typeTranslator.asPyRef(args.head, domain, evolution)}]()"
+ case TypeId.Builtins.map =>
+ q"$pyDict[${typeTranslator.asPyRef(args.head, domain, evolution)}, ${typeTranslator.asPyRef(args.last, domain, evolution)}]()"
+ case TypeId.Builtins.opt => q"None"
+ case _ => throw new IllegalStateException(s"Unsupported constructor type: $id")
+ }
+ case _ => throw new IllegalStateException("Unsupported target field type")
+ }
+
+ case o: FieldOp.WrapIntoCollection =>
+ o.newTpe.id match {
+ case TypeId.Builtins.opt => fieldRef
+ case TypeId.Builtins.set => q"{$fieldRef}"
+ case TypeId.Builtins.lst => q"[$fieldRef]"
+ case _ => throw new Exception()
+ }
+
+ case o: FieldOp.ExpandPrecision => transfer(o.newTpe, q"$fieldRef")
+
+ case o: FieldOp.SwapCollectionType => swapCollType(q"$fieldRef", o)
+ }
+ val fieldType = asVersionedIfUserTpe(field.tpe)
+ q"${field.name.name.toLowerCase}: $fieldType = $expr"
+ }
+ val ctorArgs = dtoDefn.fields.map(f => q"${f.name.name.toLowerCase}")
+ Some(q"""class ${convType.name}($baboonAbstractConversion[$typeFrom, $typeTo]):
+ | @$pyOverride
+ | def do_convert(self, ctx, conversions: $baboonAbstractConversions, _from: $typeFrom) -> $typeTo:
+ | ${assigns.join("\n").shift(8).trim}
+ | return $typeTo(
+ | ${ctorArgs.zip(dtoDefn.fields).map { case (a, f) => q"${f.name.name}=$a" }.join(",\n").shift(12).trim}
+ | )
+ |
+ | ${meta.shift(4).trim}
+ |""".stripMargin.trim)
+
+ case _: Conversion.CustomConversionRequired =>
+ Some(q"""class ${convType.name}($baboonAbstractConversion[$typeFrom, $typeTo]):
+ |
+ | ${meta.shift(4).trim}
+ |""".stripMargin)
+
+ case _ => None
+ }
+ }
+
+  private def swapCollType(fieldRef: TextTree[PyValue], op: FieldOp.SwapCollectionType): TextTree[PyValue] = {
+    val TypeRef.Constructor(oldId, oldArgs) = op.oldTpe
+    val TypeRef.Constructor(newId, newArgs) = op.newTpe
+    val tmp = q"v" // element variable bound by the generated comprehensions below
+
+    (oldId, newId) match {
+      case (TypeId.Builtins.opt, TypeId.Builtins.lst) =>
+        q"[] if $fieldRef is None else [${transfer(newArgs.head, fieldRef, Some(oldArgs.head))}]"
+      case (TypeId.Builtins.opt, TypeId.Builtins.set) => // NB: `{}` is an empty dict in Python, not a set
+        q"set() if $fieldRef is None else {${transfer(newArgs.head, fieldRef, Some(oldArgs.head))}}"
+      case (TypeId.Builtins.opt, TypeId.Builtins.opt) =>
+        q"None if $fieldRef is None else ${transfer(newArgs.head, fieldRef, Some(oldArgs.head))}"
+
+      case (TypeId.Builtins.lst, TypeId.Builtins.lst) => // iterate elements, not range(len(...)) indices
+        q"[${transfer(newArgs.head, tmp, Some(oldArgs.head))} for v in $fieldRef]"
+      case (TypeId.Builtins.lst, TypeId.Builtins.set) =>
+        q"{${transfer(newArgs.head, tmp, Some(oldArgs.head))} for v in $fieldRef}"
+
+      case (TypeId.Builtins.set, TypeId.Builtins.lst) => // sets are not indexable; iterate directly
+        q"[${transfer(newArgs.head, tmp, Some(oldArgs.head))} for v in $fieldRef]"
+      case (TypeId.Builtins.set, TypeId.Builtins.set) =>
+        q"{${transfer(newArgs.head, tmp, Some(oldArgs.head))} for v in $fieldRef}"
+
+      case (TypeId.Builtins.map, TypeId.Builtins.map) =>
+        val keyRef = q"k"
+        val valRef = q"v"
+        q"{${transfer(newArgs.head, keyRef, Some(oldArgs.head))}: ${transfer(newArgs.last, valRef, Some(oldArgs.last))} for k,v in $fieldRef.items()}"
+      case _ =>
+        throw new IllegalStateException("Unsupported collection swap")
+    }
+  }
+
+ private def transfer(
+ newTpe: TypeRef,
+ oldRef: TextTree[PyValue],
+ maybeOldTpe: Option[TypeRef] = None,
+ ): TextTree[PyValue] = {
+ val oldTpe = maybeOldTpe.getOrElse(newTpe)
+ val newTypeRefTree = typeTranslator.asPyRef(newTpe, domain, evolution, pyFileTools.definitionsBasePkg)
+ val oldTypeRefTree = asVersionedIfUserTpe(oldTpe)
+
+ (newTpe, oldTpe) match {
+ case (c: TypeRef.Constructor, s: TypeRef.Scalar) =>
+ val headTransfer = transfer(c.args.head, oldRef, Some(s))
+ c.id match {
+ case TypeId.Builtins.opt => q"$headTransfer"
+ case TypeId.Builtins.lst => q"[$headTransfer]"
+ case TypeId.Builtins.set => q"{$headTransfer}"
+ case _ => throw new IllegalStateException(s"Unsupported constructor type: ${c.id}")
+ }
+
+ case (ns: TypeRef.Scalar, os: TypeRef.Scalar) =>
+ transferScalar(oldRef, newTypeRefTree, oldTypeRefTree, ns, os)
+ case (TypeRef.Scalar(_), c: TypeRef.Constructor) =>
+ throw new IllegalStateException(s"Unsupported scalar to constructor conversion: ${c.id}")
+ case (cn: TypeRef.Constructor, co: TypeRef.Constructor) =>
+ transferConstructor(oldRef, cn, co)
+
+ }
+ }
+
+ private def transferConstructor(
+ oldRef: TextTree[PyValue],
+ cn: TypeRef.Constructor,
+ co: TypeRef.Constructor,
+ ): TextTree[PyValue] = {
+ val tmp = q"v"
+ cn match {
+ case c: TypeRef.Constructor if c.id == TypeId.Builtins.lst =>
+ q"[${transfer(c.args.head, tmp, Some(co.args.head))} for v in $oldRef]"
+
+ case c: TypeRef.Constructor if c.id == TypeId.Builtins.map =>
+ val keyRef = c.args.head
+ val valueRef = c.args.last
+ val kv = q"k"
+ val vv = q"v"
+
+ q"{${transfer(keyRef, kv, Some(co.args.head))}: ${transfer(valueRef, vv, Some(co.args.last))} for k,v in $oldRef.items()}"
+ case c: TypeRef.Constructor if c.id == TypeId.Builtins.set =>
+ q"{${transfer(c.args.head, tmp, Some(co.args.head))} for v in $oldRef}"
+ case c: TypeRef.Constructor if c.id == TypeId.Builtins.opt =>
+ q"None if $oldRef is None else ${transfer(c.args.head, oldRef, Some(co.args.head))}"
+ case c => throw new IllegalStateException(s"Unsupported constructor type: ${c.id}")
+ }
+ }
+
+ private def transferScalar(
+ oldRef: TextTree[PyValue],
+ newTypeRefTree: TextTree[PyValue],
+ oldTypeRefTree: TextTree[PyValue],
+ newScalar: TypeRef.Scalar,
+ oldScalar: TypeRef.Scalar,
+ ): TextTree[PyValue] = {
+ val direct = if (newScalar == oldScalar) oldRef else q"$newTypeRefTree($oldRef)"
+ val conv = q"conversions.convert_with_context(ctx, $oldRef, $oldTypeRefTree, $newTypeRefTree)"
+
+ newScalar.id match {
+ case _: TypeId.Builtin => direct
+ case id: TypeId.User =>
+ domain.defs.meta.nodes(id) match {
+ case DomainMember.User(_, _: Typedef.Foreign, _, _) => direct
+ case _ => conv
+ }
+ }
+ }
+
+ private def asVersionedIfUserTpe(typeRef: TypeRef) = {
+ typeRef.id match {
+ case u: TypeId.User => typeTranslator.asPyTypeVersioned(u, srcDom, evolution, pyFileTools.definitionsBasePkg).pipe(t => q"$t")
+ case _ => typeTranslator.asPyRef(typeRef, domain, evolution, pyFileTools.definitionsBasePkg)
+ }
+ }
+
+ private def makeName(conversion: Conversion): String = {
+ val nameParts =
+ Seq("Convert") ++
+ conversion.sourceTpe.owner.asPseudoPkg ++
+ Seq(conversion.sourceTpe.name.name, "From", sourceVersionString)
+
+ nameParts.mkString("__")
+ }
+
+ private def conversionType(conversion: Conversion): PyType = {
+ val className = makeName(conversion)
+
+ val moduleName = Seq(
+ "from",
+ sourceVersionString,
+ conversion.sourceTpe.owner.asPseudoPkg.mkString("_"),
+ s"${conversion.sourceTpe.name.name}",
+ ).mkString("_")
+
+ val versionPathPart = if (domain.version == evolution.latest) Nil else List(currentVersionString)
+ val convModuleId = PyModuleId(
+ NEList.unsafeFrom(
+ pyFileTools.definitionsBasePkg ++
+ domain.id.path ++
+ versionPathPart :+ moduleName
+ )
+ )
+ PyType(convModuleId, className)
+ }
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyDefnTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyDefnTranslator.scala
new file mode 100644
index 00000000..f730a817
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyDefnTranslator.scala
@@ -0,0 +1,379 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.CompilerProduct
+import io.septimalmind.baboon.parser.model.issues.BaboonIssue
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.translator.python.PyValue.PyType
+import io.septimalmind.baboon.typer.BaboonEnquiries
+import io.septimalmind.baboon.typer.model.*
+import izumi.functional.bio.{Applicative2, F}
+import izumi.fundamentals.collections.nonempty.NEList
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+trait PyDefnTranslator[F[+_, +_]] {
+ def translate(defn: DomainMember.User): F[NEList[BaboonIssue], List[PyDefnTranslator.Output]]
+ def translateFixtures(defn: DomainMember.User): F[NEList[BaboonIssue], List[PyDefnTranslator.Output]]
+ def translateTests(defn: DomainMember.User): F[NEList[BaboonIssue], List[PyDefnTranslator.Output]]
+}
+
+object PyDefnTranslator {
+ final case class CodecReg(
+ typeId: TypeId,
+ tpe: PyType,
+ tpeKeepForeigns: PyType,
+ tpeId: TextTree[PyValue],
+ trees: Map[String, TextTree[PyValue]],
+ )
+
+ final case class Output(
+ path: String,
+ tree: TextTree[PyValue],
+ module: PyValue.PyModuleId,
+ product: CompilerProduct,
+ codecReg: List[(String, List[TextTree[PyValue]])] = List.empty,
+ )
+
+ final case class PyDefnRepr(
+ defn: TextTree[PyValue],
+ codecs: List[CodecReg],
+ )
+
+ final class PyDefnTranslatorImpl[F[+_, +_]: Applicative2](
+ codecsFixture: PyCodecFixtureTranslator,
+ codecsTests: PyCodecTestTranslator,
+ typeTranslator: PyTypeTranslator,
+ baboonEnquiries: BaboonEnquiries,
+ codecs: Set[PyCodecTranslator],
+ pyDomTrees: PyDomainTreeTools,
+ evolution: BaboonEvolution,
+ fileTools: PyFileTools,
+ domain: Domain,
+ ) extends PyDefnTranslator[F] {
+ override def translate(defn: DomainMember.User): F[NEList[BaboonIssue], List[Output]] = {
+ defn.id.owner match {
+ case Owner.Adt(_) => F.pure(List.empty)
+ case _ => doTranslate(defn)
+ }
+ }
+
+ override def translateFixtures(defn: DomainMember.User): F[NEList[BaboonIssue], List[Output]] = {
+ defn.id.owner match {
+ case Owner.Adt(_) => F.pure(List.empty)
+ case _ => doTranslateFixtures(defn)
+ }
+ }
+
+ override def translateTests(defn: DomainMember.User): F[NEList[BaboonIssue], List[Output]] = {
+ defn.id.owner match {
+ case Owner.Adt(_) => F.pure(List.empty)
+ case _ => doTranslateTests(defn)
+ }
+ }
+
+ private def doTranslateTests(defn: DomainMember.User): F[NEList[BaboonIssue], List[Output]] = {
+ val codecsTestsOut = codecsTests
+ .translate(
+ defn,
+ typeTranslator.asPyType(defn.id, domain, evolution, fileTools.definitionsBasePkg),
+ typeTranslator.asPyTypeKeepForeigns(defn.id, domain, evolution, fileTools.definitionsBasePkg),
+ ).map(
+ codecsTest =>
+ Output(
+ getOutputPath(defn, prefix = Some("test_")),
+ codecsTest,
+ typeTranslator.toPyModule(defn.id, domain.version, evolution, fileTools.testsBasePkg),
+ CompilerProduct.Test,
+ )
+ )
+
+ F.pure(codecsTestsOut.toList)
+ }
+
+ private def doTranslateFixtures(defn: DomainMember.User): F[NEList[BaboonIssue], List[Output]] = {
+ val fixtureTree = codecsFixture
+ .translate(defn).map(
+ fixture =>
+ Output(
+ getOutputPath(defn, suffix = Some("_Fixture")),
+ fixture,
+ typeTranslator.toPyModule(defn.id, domain.version, evolution, fileTools.fixturesBasePkg),
+ CompilerProduct.Fixture,
+ )
+ )
+ F.pure(fixtureTree.toList)
+ }
+
+ private def doTranslate(defn: DomainMember.User): F[NEList[BaboonIssue], List[Output]] = {
+ val repr = makeFullRepr(defn)
+
+ val regsPerCodec = codecs.toList.map(codecTranslator =>
+ (codecTranslator.id, repr.codecs.flatMap(reg => reg.trees.get(codecTranslator.id).map(expr => q"${reg.tpeId}, $expr")))
+ )
+
+ F.pure(
+ List(
+ Output(
+ getOutputPath(defn),
+ repr.defn,
+ typeTranslator.toPyModule(defn.id, domain.version, evolution, fileTools.definitionsBasePkg),
+ CompilerProduct.Definition,
+ codecReg = regsPerCodec,
+ )
+ )
+ )
+ }
+
+ private def makeFullRepr(defn: DomainMember.User): PyDefnRepr = {
+ val isLatestVersion = domain.version == evolution.latest
+
+ def obsoletePrevious(tree: TextTree[PyValue]): TextTree[PyValue] = {
+ if (isLatestVersion || tree.isEmpty) tree
+ else {
+ q"""@$deprecated("Version ${domain.version.toString} is obsolete, you should migrate to ${evolution.latest.toString}")
+ |$tree""".stripMargin
+ }
+ }
+
+ val pyRef = typeTranslator.asPyType(defn.id, domain, evolution, fileTools.definitionsBasePkg)
+ val srcRef = typeTranslator.asPyTypeKeepForeigns(defn.id, domain, evolution, fileTools.definitionsBasePkg)
+
+ val repr = mkRepr(defn, isLatestVersion)
+ val defnRepr = List(obsoletePrevious(repr.defn))
+ val codecTrees = codecs.toList.flatMap(_.translate(defn, pyRef, srcRef)).map(obsoletePrevious)
+ val allDefs = (defnRepr ++ codecTrees).joinNN()
+
+ val reg = defn.defn match {
+ case _: Typedef.NonDataTypedef => List.empty[CodecReg]
+ case d =>
+ val codecsReg = codecs.toList
+ .sortBy(_.getClass.getName)
+ .flatMap {
+ codec =>
+ if (codec.isActive(d.id)) {
+ List(codec.id -> q"${codec.codecType(defn.id)}.instance()")
+ } else {
+ List.empty
+ }
+ }
+ List(CodecReg(defn.id, pyRef, srcRef, q"\"${defn.id.toString}\"", codecsReg.toMap))
+ }
+
+ val allRegs = reg ++ repr.codecs
+
+ assert(defn.id.pkg == domain.id)
+ PyDefnRepr(allDefs, allRegs)
+ }
+
+ private def mkRepr(defn: DomainMember.User, isLatestVersion: Boolean): PyDefnRepr = {
+ val genMarker = if (isLatestVersion) iBaboonGeneratedLatest else iBaboonGenerated
+ val codecMeta = codecs.map(_.codecMeta(defn.id).member)
+ val mainMeta = pyDomTrees.makeDataMeta(defn) ++ codecMeta
+ val jsonCodecActive = codecs.collectFirst { case jsonCodec: PyJsonCodecGenerator => jsonCodec }.exists(_.isActive(defn.id))
+ defn.defn match {
+ case dto: Typedef.Dto =>
+ val dtoContracts = dto.contracts
+ val dtoContractsDefs = dtoContracts.flatMap(domain.defs.meta.nodes.get).collect { case DomainMember.User(_, c: Typedef.Contract, _, _) => c }
+ val contractsFields = dtoContractsDefs.flatMap(_.fields)
+
+ val dtoFieldsTrees = genDtoFields(dto.fields, contractsFields.toSet)
+ val dtoProperties = genDtoProperties(contractsFields)
+
+ val contractParents = dto.contracts.toSeq
+ val adtParent = dto.id.owner match {
+ case Owner.Adt(id) => List(id)
+ case _ => Nil
+ }
+ val directParentsDefs = (adtParent ++ contractParents).flatMap(domain.defs.meta.nodes.get).collect { case u: DomainMember.User => u }
+
+ val superclasses = baboonEnquiries.collectParents(domain, directParentsDefs).toSet
+ val uniqueContracts = dtoContracts.filterNot(c1 => superclasses.contains(c1))
+ val genMarkerParent = if (adtParent.nonEmpty || contractParents.nonEmpty) Nil else List(genMarker)
+ val adtMemberMetaParent = if (adtParent.isEmpty) Nil else List(iBaboonAdtMemberMeta)
+
+ val superclassesTypes = (adtParent ++ uniqueContracts).map(c => typeTranslator.asPyType(c, domain, evolution, fileTools.definitionsBasePkg))
+
+ val parentTypes = superclassesTypes ++ genMarkerParent ++ adtMemberMetaParent :+ pydanticBaseModel
+
+ val parents = mkParents(parentTypes)
+
+ val modelConfig = genDtoPydanticModelConf(dto.fields, dtoContracts.nonEmpty, jsonCodecActive)
+
+ val members =
+ List(
+ Some(dtoFieldsTrees.joinN()),
+ Some(modelConfig),
+ dtoProperties.map(_.joinN()),
+ Some(mainMeta.joinN()),
+ ).flatten
+
+ PyDefnRepr(
+ q"""class ${dto.id.name.name}($parents):
+ | ${members.joinNN().shift(4).trim}
+ |""".stripMargin,
+ List.empty,
+ )
+
+ case enum: Typedef.Enum =>
+ val branches = enum.members.map(m => q"${m.name.capitalize} = \"${m.name.capitalize}\"").toSeq
+ PyDefnRepr(
+ q"""|class ${enum.id.name.name}($pyEnum):
+ | ${branches.joinN().shift(4).trim}
+ |""".stripMargin,
+ List.empty,
+ )
+ case adt: Typedef.Adt =>
+ val contracts = adt.contracts.map(c => typeTranslator.asPyType(c, domain, evolution, fileTools.definitionsBasePkg))
+ val defaultParents = contracts ++ List(pydanticBaseModel)
+ val genMarkerParent = if (adt.contracts.isEmpty) List(genMarker) else Nil
+ val allParents = defaultParents ++ genMarkerParent
+ val parents = mkParents(allParents)
+
+ val memberTrees = adt.members.map(
+ mid =>
+ domain.defs.meta.nodes.get(mid) match {
+ case Some(mdefn: DomainMember.User) => makeFullRepr(mdefn)
+ case m => throw new RuntimeException(s"BUG: missing/wrong adt member: $mid => $m")
+ }
+ )
+
+ val branches = memberTrees
+ .map(_.defn)
+ .toSeq
+
+ val jsonCodec = if (jsonCodecActive) {
+ Some(q"""
+ |__registry__: dict[str, type] = $pyDefaultDict()
+ |
+ |def __init_subclass__(cls, **kwargs):
+ | super().__init_subclass__(**kwargs)
+ | ${adt.id.name.name}.__registry__[cls.__name__] = cls
+ |
+ |@$pydanticModelSerializer(mode='wrap')
+ |def serialize(self, serializer):
+ | return {self.__class__.__name__: serializer(self)}
+ |
+ |@$pydanticModelValidator(mode="wrap")
+ |@$pyClassMethod
+ |def polymorphic(cls, values, handler):
+ | if isinstance(values, dict) and len(values) == 1:
+ | class_name = next(iter(values))
+ | registry = ${adt.id.name.name}.__registry__
+ |
+ | if class_name in registry:
+ | candidate = registry[class_name]
+ | if issubclass(candidate, cls):
+ | return candidate.model_validate(values[class_name])
+ | else:
+             |                        raise ValueError("not subclass")
+ |
+ | return handler(values)
+ |""".stripMargin)
+ } else None
+
+ val members = List(
+ jsonCodec,
+ Some(mainMeta.joinN()),
+ ).flatten
+
+ val regs = memberTrees.map(_.codecs)
+
+ PyDefnRepr(
+ q"""|class ${adt.id.name.name}($parents):
+ | pass
+ |
+ | ${members.joinNN().shift(4).trim}
+ |
+ |${branches.joinNN()}
+ |""".stripMargin,
+ regs.toList.flatten,
+ )
+
+ case contract: Typedef.Contract =>
+ val contracts = contract.contracts.map(c => typeTranslator.asPyType(c, domain, evolution, fileTools.definitionsBasePkg))
+ val allParents = if (contract.contracts.isEmpty) List(genMarker, pyABC) ++ contracts else contracts
+ val parents = mkParents(allParents)
+ val methods = contract.fields.map {
+ f =>
+ val tpe = typeTranslator.asPyRef(f.tpe, domain, evolution, fileTools.definitionsBasePkg)
+ val name = s"${f.name.name}"
+ q"""@$pyAbstractMethod
+ |def $name(self) -> $tpe:
+ | raise NotImplementedError
+ |""".stripMargin
+ }
+ val allMethods = if (methods.isEmpty) q"pass" else methods.joinN()
+ PyDefnRepr(
+ q"""|class ${contract.id.name.name}($parents):
+ | ${allMethods.shift(4).trim}
+ |""".stripMargin,
+ List.empty,
+ )
+ case _: Typedef.Service => PyDefnRepr(q"", List.empty)
+ case _: Typedef.Foreign => PyDefnRepr(q"", List.empty)
+ }
+
+ }
+
+ private def genDtoFields(dtoFields: List[Field], contractsFields: Set[Field]): List[TextTree[PyValue]] = {
+ val fields = dtoFields.map {
+ field =>
+ val fieldName = field.name.name
+ val fieldType = typeTranslator.asPyRef(field.tpe, domain, evolution, fileTools.definitionsBasePkg)
+
+ if (contractsFields.contains(field)) {
+ q"${fieldName}_: $fieldType = $pydanticField(alias='$fieldName', serialization_alias='$fieldName')"
+ } else q"$fieldName: $fieldType"
+ }
+ if (fields.isEmpty) List(q"pass") else fields
+ }
+
+ private def genDtoProperties(contractsFields: List[Field]): Option[List[TextTree[PyValue]]] = {
+ if (contractsFields.nonEmpty) {
+ val properties = contractsFields
+ .map(f => q"""@property
+ |def ${f.name.name}(self) -> ${typeTranslator.asPyRef(f.tpe, domain, evolution, fileTools.definitionsBasePkg)}:
+ | return self.${f.name.name}_
+ |""".stripMargin)
+ Some(properties)
+ } else None
+ }
+
+ private def genDtoPydanticModelConf(dtoFields: List[Field], hasContracts: Boolean, jsonCodecActive: Boolean): TextTree[PyType] = {
+ val frozen = Some(q"frozen=True")
+ val serializeByAlias = if (jsonCodecActive && hasContracts) Some(q"serialize_by_alias=True") else None
+ val serializeJsonBytesAsHex =
+ if (dtoFields.map(_.tpe.id).contains(TypeId.Builtins.bytes)) {
+ List(
+ q"ser_json_bytes='hex'",
+ q"val_json_bytes='hex'",
+ )
+ } else Nil
+
+ val serializeDecimalAsJsonNumber =
+ if (dtoFields.map(_.tpe.id).contains(TypeId.Builtins.f128)) {
+ List(q"json_encoders={Decimal: float}")
+ } else Nil
+
+ val configs = List(frozen, serializeByAlias, serializeJsonBytesAsHex, serializeDecimalAsJsonNumber).flatten
+
+ q"""model_config = $pydanticConfigDict(
+ | ${configs.join(",\n").shift(4).trim}
+ |)""".stripMargin
+ }
+
+ private def mkParents(refs: List[PyType]): TextTree[PyValue] = {
+ if (refs.isEmpty) q"" else q"${refs.map(s => q"$s").join(", ")}"
+ }
+
+ private def getOutputPath(defn: DomainMember.User, prefix: Option[String] = None, suffix: Option[String] = None): String = {
+ val fbase = fileTools.basename(domain, evolution)
+ val fname = s"${prefix.getOrElse("")}${defn.id.name.name}${suffix.getOrElse("")}.py"
+ defn.defn.id.owner match {
+ case Owner.Toplevel => s"$fbase/$fname"
+ case Owner.Ns(path) => s"$fbase/${path.map(_.name.toLowerCase).mkString("/")}/$fname"
+ case Owner.Adt(id) => s"$fbase/${id.name.name.toLowerCase}.$fname"
+ }
+ }
+ }
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyDomainTreeTools.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyDomainTreeTools.scala
new file mode 100644
index 00000000..2da3dc63
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyDomainTreeTools.scala
@@ -0,0 +1,81 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.typer.model.{BaboonEvolution, Domain, DomainMember, Owner, Typedef}
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+trait PyDomainTreeTools {
+ def makeDataMeta(defn: DomainMember.User): Seq[TextTree[PyValue]]
+ def makeCodecMeta(defn: DomainMember.User): Seq[TextTree[PyValue]]
+}
+
+object PyDomainTreeTools {
+ final class PyDomainTreeToolsImpl(
+ domain: Domain,
+ evolution: BaboonEvolution,
+ typeTranslator: PyTypeTranslator,
+ pyFileTools: PyFileTools,
+ ) extends PyDomainTreeTools {
+ override def makeDataMeta(defn: DomainMember.User): Seq[TextTree[PyValue]] = {
+ makeFullMeta(defn, isCodec = false)
+ }
+
+ override def makeCodecMeta(defn: DomainMember.User): Seq[TextTree[PyValue]] = {
+ defn.defn match {
+ case _: Typedef.Enum => makeFullMeta(defn, isCodec = true)
+ case _: Typedef.Foreign => makeFullMeta(defn, isCodec = true)
+ case _ => makeRefMeta(defn)
+ }
+ }
+
+ private def makeFullMeta(defn: DomainMember.User, isCodec: Boolean): Seq[TextTree[PyValue]] = {
+ val adtMethods = defn.id.owner match {
+ case Owner.Adt(id) =>
+ List(
+ q"""baboon_adt_type_identifier: $pyClassVar[$pyStr] = "${id.toString}"
+ |@$pyClassMethod
+ |def baboon_adt_type(cls) -> $pyType:
+ | return cls
+ |""".stripMargin
+ )
+ case _ => List.empty
+ }
+
+ val unmodifiedMethods = if (!isCodec) {
+ val unmodifiedSince = evolution.typesUnchangedSince(domain.version)(defn.id)
+ List(
+ q"""baboon_same_in_version: $pyClassVar[$pyList[$pyStr]] = [${unmodifiedSince.sameIn.map(v => q"\"${v.v.toString}\"").toList.join(", ")}]""".stripMargin
+ )
+ } else Nil
+
+ domainAndTypeMeta(defn) ++ unmodifiedMethods ++ adtMethods
+ }
+
+ private def makeRefMeta(defn: DomainMember.User): Seq[TextTree[PyValue]] = {
+ val pyType = typeTranslator.asPyType(defn.id, domain, evolution, pyFileTools.definitionsBasePkg)
+
+ val adtMethods = defn.id.owner match {
+ case Owner.Adt(id) =>
+ List(
+ q"""baboon_adt_type_identifier: $pyClassVar[$pyStr] = $pyType.baboon_adt_type_identifier
+ |@$pyClassMethod
+ |def baboon_adt_type(cls) -> type:
+ | return type(${typeTranslator.asPyType(id, domain, evolution, pyFileTools.definitionsBasePkg)})
+ |""".stripMargin
+ )
+ case _ => Nil
+ }
+
+ domainAndTypeMeta(defn) ++ adtMethods
+ }
+
+ private def domainAndTypeMeta(defn: DomainMember.User): Seq[TextTree[PyValue]] = {
+ Seq(
+ q"""baboon_domain_version: $pyClassVar[$pyStr] = "${domain.version.v.toString}"""",
+ q"""baboon_domain_identifier: $pyClassVar[$pyStr] = "${defn.id.pkg.toString}"""",
+ q"""baboon_type_identifier: $pyClassVar[$pyStr] = "${defn.id.toString}"""",
+ )
+ }
+ }
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyFileTools.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyFileTools.scala
new file mode 100644
index 00000000..e5a8d673
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyFileTools.scala
@@ -0,0 +1,64 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.CompilerTarget.PyTarget
+import io.septimalmind.baboon.typer.model.{BaboonEvolution, Domain}
+import izumi.fundamentals.collections.nonempty.NEString
+
+trait PyFileTools {
+ def basename(dom: Domain, evolution: BaboonEvolution): String
+ def definitionsBasePkg: List[String]
+ def fixturesBasePkg: List[String]
+ def testsBasePkg: List[String]
+}
+
+object PyFileTools {
+ class ScFileToolsImpl(pyTarget: PyTarget) extends PyFileTools {
+ private val (
+ definitionsBase,
+ fixturesBase,
+ testsBase,
+ ) = collectPackagesBases
+
+ override val definitionsBasePkg: List[String] = definitionsBase.map(_.mkString)
+ override val fixturesBasePkg: List[String] = fixturesBase.map(_.mkString)
+ override val testsBasePkg: List[String] = testsBase.map(_.mkString)
+
+ def basename(dom: Domain, evolution: BaboonEvolution): String = {
+ basename(
+ dom,
+ evolution.latest == dom.version,
+ )
+ }
+
+ private def basename(dom: Domain, omitVersion: Boolean): String = {
+ val base = dom.id.path.map(_.toLowerCase)
+ val version = dom.version.format(prefix = "v", delimiter = "_")
+ val segments = if (omitVersion) base else base ++ Seq(version)
+ segments.mkString("/")
+ }
+
+ private def collectPackagesBases: (List[NEString], List[NEString], List[NEString]) = {
+ val basePaths = List(
+ pyTarget.output.output.segments.toList,
+ pyTarget.output.fixturesOutput.map(_.segments).getOrElse(Nil),
+ pyTarget.output.testsOutput.map(_.segments).getOrElse(Nil),
+ )
+
+ val longestCommonPrefix = basePaths.reduceLeft {
+ (currentPrefix, nextSequence) =>
+ currentPrefix
+ .zip(nextSequence)
+ .takeWhile { case (a, b) => a == b }
+ .map(_._1)
+ }
+
+ if (longestCommonPrefix.nonEmpty) {
+ (
+ pyTarget.output.output.segments.drop(longestCommonPrefix.size).toList,
+ pyTarget.output.fixturesOutput.map(_.segments).getOrElse(Nil).drop(longestCommonPrefix.size).toList,
+ pyTarget.output.testsOutput.map(_.segments).getOrElse(Nil).drop(longestCommonPrefix.size).toList,
+ )
+ } else (List(pyTarget.output.output.name), Nil, Nil)
+ }
+ }
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyJsonCodecGenerator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyJsonCodecGenerator.scala
new file mode 100644
index 00000000..6bf23e74
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyJsonCodecGenerator.scala
@@ -0,0 +1,105 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.CompilerTarget.PyTarget
+import io.septimalmind.baboon.parser.model.DerivationDecl
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.translator.python.PyValue.PyType
+import io.septimalmind.baboon.typer.model.{BaboonEvolution, Domain, DomainMember, TypeId, Typedef}
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+final class PyJsonCodecGenerator(
+ typeTranslator: PyTypeTranslator,
+ treeTools: PyDomainTreeTools,
+ pyFileTools: PyFileTools,
+ evolution: BaboonEvolution,
+ pyTarget: PyTarget,
+ domain: Domain,
+) extends PyCodecTranslator {
+ override def translate(defn: DomainMember.User, pyRef: PyType, srcRef: PyType): Option[TextTree[PyValue]] = {
+ (defn.defn match {
+ case _: Typedef.Dto => Some(genDtoBodies(pyRef))
+ case _: Typedef.Adt => Some(genAdtBodies(pyRef))
+ case _: Typedef.Enum => Some(genEnumBodies(pyRef))
+ case _: Typedef.Foreign => Some(genForeignTypesBodies(pyRef))
+ case _: Typedef.Service => None
+ case _: Typedef.Contract => None
+ }).map {
+ case (enc, dec) => genCodec(defn, pyRef, srcRef, enc, dec)
+ }
+ }
+
+ private def genCodec(
+ defn: DomainMember.User,
+ pyRef: PyType,
+ srcRef: PyType,
+ enc: TextTree[PyValue],
+ dec: TextTree[PyValue],
+ ): TextTree[PyValue] = {
+ val baseMethods =
+ q"""def encode(self, value: $pyRef) -> $pyStr:
+ | ${enc.shift(4).trim}
+ |
+ |def decode(self, wire: $pyStr) -> $pyRef:
+ | ${dec.shift(4).trim}
+ |""".stripMargin
+
+ val codecParent = q"$baboonJsonCodec[$pyRef]"
+ val codecName = q"${srcRef.name}_JsonCodec"
+ q"""class $codecName($codecParent):
+ | ${baseMethods.shift(4).trim}
+ |
+ | ${treeTools.makeCodecMeta(defn).joinN().shift(4).trim}
+ |
+ | @$pyClassMethod
+ | @$pyCache
+       |    def instance(cls):
+ | return cls()
+ |""".stripMargin
+ }
+
+ private def genForeignTypesBodies(name: PyType): (TextTree[PyValue], TextTree[PyValue]) = {
+ (
+ q"""raise ValueError(f"$name is a foreign type")""",
+ q"""raise ValueError(f"$name is a foreign type")""",
+ )
+ }
+
+ private def genEnumBodies(name: PyType): (TextTree[PyValue], TextTree[PyValue]) = {
+ val encode = q"""return $pyJsonDumps(value.value)""".stripMargin
+ val decode = q"""return $name($pyJsonLoads(wire))""".stripMargin
+ (encode, decode)
+ }
+
+ private def genAdtBodies(name: PyType): (TextTree[PyValue], TextTree[PyValue]) = {
+ val encode = q"""return value.model_dump_json()""".stripMargin
+ val decode = q"""return $name.model_validate_json(wire)""".stripMargin
+ (encode, decode)
+ }
+
+ private def genDtoBodies(name: PyType): (TextTree[PyValue], TextTree[PyValue]) = {
+ val encode = q"""return value.model_dump_json()""".stripMargin
+ val decode = q"""return $name.model_validate_json(wire)""".stripMargin
+ (encode, decode)
+ }
+
+ override def codecType(tid: TypeId.User): PyType = {
+ val typeName = s"${tid.name.name}_JsonCodec"
+ val moduleId = typeTranslator.toPyModule(tid, domain.version, evolution, pyFileTools.definitionsBasePkg)
+ PyType(moduleId, typeName)
+ }
+
+ override def codecMeta(tid: TypeId.User): PyCodecTranslator.CodecMeta = {
+ val meta = q"""@$pyStaticMethod
+ |def codec_json():
+ | return ${codecType(tid)}.instance()""".stripMargin
+ PyCodecTranslator.CodecMeta(meta)
+ }
+
+ override def isActive(id: TypeId): Boolean = {
+ pyTarget.language.generateJsonCodecs && (pyTarget.language.generateJsonCodecsByDefault || domain.derivationRequests
+ .getOrElse(DerivationDecl("json"), Set.empty[TypeId]).contains(id))
+ }
+
+ override def id: String = "json"
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyTypeTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyTypeTranslator.scala
new file mode 100644
index 00000000..e65f880c
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyTypeTranslator.scala
@@ -0,0 +1,147 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.translator.python.PyValue.{PyModuleId, PyType}
+import io.septimalmind.baboon.typer.model.*
+import izumi.fundamentals.collections.nonempty.NEList
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+final class PyTypeTranslator {
+ def asPyRef(
+ tpe: TypeRef,
+ domain: Domain,
+ evolution: BaboonEvolution,
+ pkgBase: List[String] = Nil,
+ ): TextTree[PyValue] = {
+ tpe match {
+ case TypeRef.Scalar(id) =>
+ q"${asPyType(id, domain, evolution, pkgBase)}"
+ case TypeRef.Constructor(id, args) =>
+ val tpe = asPyType(id, domain, evolution, pkgBase)
+ val targs = args.map(asPyRef(_, domain, evolution, pkgBase))
+ q"$tpe[${targs.toSeq.join(", ")}]"
+ }
+ }
+
+ def asPyType(
+ tpe: TypeId,
+ domain: Domain,
+ evolution: BaboonEvolution,
+ pkgBase: List[String] = Nil,
+ ): PyType = {
+ tpe match {
+ case b: TypeId.BuiltinScalar =>
+ b match {
+ case TypeId.Builtins.i08 | TypeId.Builtins.u08 => pyInt
+ case TypeId.Builtins.i16 | TypeId.Builtins.u16 => pyInt
+ case TypeId.Builtins.i32 | TypeId.Builtins.u32 => pyInt
+ case TypeId.Builtins.i64 | TypeId.Builtins.u64 => pyInt
+ case TypeId.Builtins.f32 => pyFloat
+ case TypeId.Builtins.f64 => pyFloat
+ case TypeId.Builtins.f128 => pyDecimal
+ case TypeId.Builtins.str => pyStr
+ case TypeId.Builtins.uid => pyUuid
+ case TypeId.Builtins.tso | TypeId.Builtins.tsu => pyDateTime
+ case TypeId.Builtins.bit => pyBool
+ case TypeId.Builtins.bytes => pyBytes
+
+ case other => throw new IllegalArgumentException(s"Unexpected: $other")
+ }
+ case TypeId.Builtins.map => pyDict
+ case TypeId.Builtins.lst => pyList
+ case TypeId.Builtins.set => pySet
+ case TypeId.Builtins.opt => pyOpt
+ case uid: TypeId.User => asPyTypeDerefForeign(uid, domain, evolution, pkgBase)
+ case other => throw new IllegalArgumentException(s"Unexpected: $other")
+ }
+ }
+
+ def asPyTypeDerefForeign(
+ tid: TypeId.User,
+ domain: Domain,
+ evolution: BaboonEvolution,
+ pkgBase: List[String] = Nil,
+ ): PyType = {
+ derefForeign(tid, domain).getOrElse(asPyTypeKeepForeigns(tid, domain, evolution, pkgBase))
+ }
+
+ def asPyTypeKeepForeigns(
+ tid: TypeId.User,
+ domain: Domain,
+ evolution: BaboonEvolution,
+ pkgBase: List[String] = Nil,
+ ): PyType = {
+ val module = toPyModule(tid, domain.version, evolution, pkgBase)
+ PyType(module, s"${tid.name.name.capitalize}")
+ }
+
+ def asPyTypeVersioned(
+ tid: TypeId.User,
+ domain: Domain,
+ evolution: BaboonEvolution,
+ pkgBase: List[String],
+ ): PyType = {
+ derefForeign(tid, domain).getOrElse {
+ val moduleId = toPyModule(tid, domain.version, evolution, pkgBase)
+ val versionString = domain.version.format(prefix = "v", delimiter = "_")
+ val ownerPath = renderOwner(tid.owner)
+ val moduleName = typeModuleName(tid) match {
+ case Some(name) => ownerPath :+ name
+ case None => ownerPath
+ }
+ val typeName = List(tid.name.name)
+ val fullTypeName = (List(versionString) ++ moduleName ++ typeName).mkString(".")
+ PyType(moduleId, fullTypeName, versioned = true)
+ }
+ }
+
+ def toPyModule(
+ tid: TypeId.User,
+ version: Version,
+ evolution: BaboonEvolution,
+ pkgBase: List[String],
+ ): PyModuleId = {
+ val pathToModule = tid.pkg.path.toList
+ val versionPathPart = if (version != evolution.latest) List(version.format(prefix = "v", delimiter = "_")) else Nil
+ val ownerPath = renderOwner(tid.owner)
+ val name = typeModuleName(tid).toList
+ val fullPath = pkgBase ++ pathToModule ++ versionPathPart ++ ownerPath ++ name
+ PyModuleId(NEList.unsafeFrom(fullPath), if (version == evolution.latest) None else Some(version))
+ }
+
+ def toPyModule(pkg: Pkg): PyModuleId = {
+ PyModuleId(NEList.unsafeFrom(pkg.path.toList))
+ }
+
+ private def derefForeign(
+ tid: TypeId.User,
+ domain: Domain,
+ ): Option[PyType] = {
+ domain.defs.meta.nodes(tid) match {
+ case DomainMember.User(_, defn: Typedef.Foreign, _, _) =>
+ val foreign = defn.bindings("py")
+ val parts = foreign.decl.split('.').toList
+ assert(parts.length > 1)
+ val module = parts.init
+ val id = parts.last
+ Some(PyType(PyModuleId(NEList.unsafeFrom(module)), id))
+ case _ => None
+ }
+ }
+
+ private def typeModuleName(tid: TypeId.User): Option[String] = {
+ tid.owner match {
+ case _: Owner.Adt => None
+ case _ => Some(tid.name.name)
+ }
+ }
+
+ private def renderOwner(owner: Owner): List[String] = {
+ owner match {
+ case Owner.Toplevel => Nil
+ case Owner.Ns(path) => path.map(_.name.toLowerCase).toList
+ case Owner.Adt(id) => renderOwner(id.owner) :+ id.name.name
+ }
+ }
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyTypes.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyTypes.scala
new file mode 100644
index 00000000..927c747c
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyTypes.scala
@@ -0,0 +1,110 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.translator.python.PyValue.{PyModuleId, PyType}
+
+object PyTypes {
+ // baboon
+
+ // baboon modules
+ val pyBaboonSharedRuntimeModule: PyModuleId = PyModuleId(s"baboon_runtime_shared")
+ val pyBaboonCodecsModule: PyModuleId = PyModuleId(s"baboon_codecs")
+
+ // python modules
+ val pyBuiltins: PyModuleId = PyModuleId("builtins")
+ val pyUUUIDModule: PyModuleId = PyModuleId("uuid")
+ val pyDateTimeModule: PyModuleId = PyModuleId("datetime")
+ val pyTypingModule: PyModuleId = PyModuleId("typing")
+ val pyEnumModule: PyModuleId = PyModuleId("enum")
+ val pyAbcModule: PyModuleId = PyModuleId("abc")
+ val pyJson: PyModuleId = PyModuleId("json")
+ val pyFuncTools: PyModuleId = PyModuleId("functools")
+ val pyUnittest: PyModuleId = PyModuleId("unittest")
+ val pyIO: PyModuleId = PyModuleId("io")
+ val pyDecimalModule: PyModuleId = PyModuleId("decimal")
+ val pyCollections: PyModuleId = PyModuleId("collections")
+
+ // python external modules
+ val pydantic: PyModuleId = PyModuleId("pydantic")
+
+ // baboon meta
+ val baboonMeta: PyType = PyType(pyBaboonSharedRuntimeModule, "BaboonMeta")
+ val iBaboonGenerated: PyType = PyType(pyBaboonSharedRuntimeModule, "IBaboonGenerated")
+ val iBaboonGeneratedLatest: PyType = PyType(pyBaboonSharedRuntimeModule, "IBaboonGeneratedLatest")
+ val iBaboonAdtMemberMeta: PyType = PyType(pyBaboonSharedRuntimeModule, "IBaboonAdtMemberMeta")
+
+ // baboon codecs
+ val baboonJsonCodec: PyType = PyType(pyBaboonCodecsModule, "BaboonJsonCodec")
+ val baboonUEBACodec: PyType = PyType(pyBaboonCodecsModule, "BaboonUEBACodec")
+ def abstractBaboonCodecs(id: String): PyType = PyType(pyBaboonCodecsModule, s"AbstractBaboon${id.capitalize}Codecs")
+ val baboonCodecContext: PyType = PyType(pyBaboonCodecsModule, "BaboonCodecContext")
+ val baboonUEBACodecIndexed: PyType = PyType(pyBaboonCodecsModule, "BaboonBinCodecIndexed")
+
+ // baboon conversions
+ val baboonAbstractConversion: PyType = PyType(pyBaboonSharedRuntimeModule, "BaboonAbstractConversion")
+ val baboonAbstractConversions: PyType = PyType(pyBaboonSharedRuntimeModule, "BaboonAbstractConversions")
+
+ // baboon types
+ val deprecated: PyType = PyType(pyBaboonSharedRuntimeModule, "deprecated")
+ val baboonFixture: PyType = PyType(pyBaboonSharedRuntimeModule, "Fixture")
+ val baboonLEDataOutputStream: PyType = PyType(pyBaboonCodecsModule, "LEDataOutputStream")
+ val baboonLEDataInputStream: PyType = PyType(pyBaboonCodecsModule, "LEDataInputStream")
+ val baboonSafeList: PyType = PyType(pyBaboonCodecsModule, "SafeList")
+ val baboonSafeSet: PyType = PyType(pyBaboonCodecsModule, "SafeSet")
+
+ // python
+
+ // builtins
+ val pyInt = PyType(pyBuiltins, "int")
+ val pyFloat = PyType(pyBuiltins, "float")
+ val pyDecimal = PyType(pyDecimalModule, "Decimal")
+ val pyBool = PyType(pyBuiltins, "bool")
+ val pyStr = PyType(pyBuiltins, "str")
+ val pyType = PyType(pyBuiltins, "type")
+ val pyDict = PyType(pyBuiltins, "dict")
+ val pySet = PyType(pyBuiltins, "set")
+ val pyList = PyType(pyBuiltins, "list")
+ val pyClassMethod = PyType(pyBuiltins, "classmethod")
+ val pyStaticMethod = PyType(pyBuiltins, "staticmethod")
+ val pyBytes = PyType(pyBuiltins, "bytes")
+
+ // unittest
+ val pyTestCase = PyType(pyUnittest, "TestCase")
+
+ // io
+ val pyBytesIO = PyType(pyIO, "BytesIO")
+
+ // functools
+ val pyCache = PyType(pyFuncTools, "cache")
+
+ // pydantic
+ val pydanticField = PyType(pydantic, "Field")
+ val pydanticBaseModel = PyType(pydantic, "BaseModel")
+ val pydanticConfigDict = PyType(pydantic, "ConfigDict")
+ val pydanticModelSerializer = PyType(pydantic, "model_serializer")
+ val pydanticModelValidator = PyType(pydantic, "model_validator")
+
+ // typing
+ val pyOpt = PyType(pyTypingModule, "Optional")
+ val pyOverride = PyType(pyTypingModule, "override")
+ val pyClassVar = PyType(pyTypingModule, "ClassVar")
+
+ // json
+ val pyJsonDumps = PyType(pyJson, "dumps")
+ val pyJsonLoads = PyType(pyJson, "loads")
+
+ // uuid
+ val pyUuid = PyType(pyUUUIDModule, "UUID")
+
+ // datetime
+ val pyDateTime = PyType(pyDateTimeModule, "datetime")
+
+ // enum
+ val pyEnum = PyType(pyEnumModule, "Enum")
+
+ // abc
+ val pyABC: PyType = PyType(pyAbcModule, "ABC")
+ val pyAbstractMethod = PyType(pyAbcModule, "abstractmethod")
+
+ // collections
+ val pyDefaultDict: PyType = PyType(pyCollections, "defaultdict")
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyUEBACodecGenerator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyUEBACodecGenerator.scala
new file mode 100644
index 00000000..ed4621f2
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyUEBACodecGenerator.scala
@@ -0,0 +1,435 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.CompilerTarget.PyTarget
+import io.septimalmind.baboon.parser.model.DerivationDecl
+import io.septimalmind.baboon.translator.python.PyTypes.*
+import io.septimalmind.baboon.translator.python.PyValue.PyType
+import io.septimalmind.baboon.typer.model.*
+import izumi.fundamentals.platform.strings.TextTree
+import izumi.fundamentals.platform.strings.TextTree.Quote
+
+class PyUEBACodecGenerator(
+ typeTranslator: PyTypeTranslator,
+ treeTools: PyDomainTreeTools,
+ evolution: BaboonEvolution,
+ pyFileTools: PyFileTools,
+ pyTarget: PyTarget,
+ domain: Domain,
+) extends PyCodecTranslator {
+ override def translate(
+ defn: DomainMember.User,
+ pyRef: PyValue.PyType,
+ srcRef: PyValue.PyType,
+ ): Option[TextTree[PyValue]] = {
+ (defn.defn match {
+ case d: Typedef.Dto => Some(genDtoBodies(pyRef, d))
+ case e: Typedef.Enum => Some(genEnumBodies(e))
+ case a: Typedef.Adt => Some(genAdtBodies(pyRef, a))
+ case _: Typedef.Foreign => Some(genForeignTypesBodies(pyRef))
+ case _: Typedef.Service => None
+ case _: Typedef.Contract => None
+ }).map {
+ case (enc, dec) =>
+ val branchDecoder = defn.defn match {
+ case d: Typedef.Dto => genBranchDecoder(pyRef, d)
+ case _ => None
+ }
+ genCodec(defn, pyRef, srcRef, enc, dec, branchDecoder)
+ }
+ }
+
+ private def genCodec(
+ defn: DomainMember.User,
+ pyRef: PyType,
+ srcRef: PyType,
+ enc: TextTree[PyValue],
+ dec: TextTree[PyValue],
+ branchDecoder: Option[TextTree[PyValue]],
+ ): TextTree[PyValue] = {
+ val indexBody = defn.defn match {
+ case d: Typedef.Dto =>
+ val varlens = d.fields.filter(f => domain.refMeta(f.tpe).len.isVariable)
+ val comment = varlens.map(f => q"# ${f.toString}").joinN()
+ q"""$comment
+ |return ${varlens.size.toString}""".stripMargin
+
+ case _: Typedef.Enum => q"return 0"
+ case _: Typedef.Adt => q"return 0"
+ case _: Typedef.Foreign => q"""raise ValueError("$pyRef is a foreign type")"""
+
+ case d: Typedef.Contract => throw new IllegalArgumentException(s"BUG: contract codec should not be rendered: $d")
+ case d: Typedef.Service => throw new IllegalArgumentException(s"BUG: service codec should not be rendered: $d")
+ }
+
+ val indexMethods = List(
+ q"""def index_elements_count(self, ctx: $baboonCodecContext) -> $pyInt:
+ | ${indexBody.shift(4).trim}
+ |""".stripMargin
+ )
+
+ val methods =
+ List(q"""def encode(self, ctx: $baboonCodecContext, wire: $baboonLEDataOutputStream, value: $pyRef):
+ | ${enc.shift(4).trim}
+ |
+ |def decode(self, ctx: $baboonCodecContext, wire: $baboonLEDataInputStream) -> $pyRef:
+ | ${dec.shift(4).trim}
+ |""".stripMargin) ++ branchDecoder.map {
+ body =>
+ q"""def decode_branch(self, ctx: $baboonCodecContext, wire: $baboonLEDataInputStream) -> $pyRef:
+ | ${body.shift(4).trim}
+ |""".stripMargin
+ }.toList ++ indexMethods
+
+ val parents = defn.defn match {
+ case _: Typedef.Enum => List(q"$baboonUEBACodec[${pyRef.name}]", q"$baboonUEBACodecIndexed")
+ case _ =>
+ val adtParents: List[TextTree[PyValue]] = defn.id.owner match {
+ case Owner.Adt(_) => List(q"$iBaboonAdtMemberMeta")
+ case _ => List.empty
+ }
+
+ List(
+ q"$baboonUEBACodec[$pyRef]",
+ q"$baboonUEBACodecIndexed",
+ ) ++ adtParents
+ }
+
+ val name = q"${srcRef.name}_UEBACodec"
+
+ q"""class $name(${parents.join(", ")}):
+ | ${methods.joinNN().shift(4).trim}
+ |
+ | ${treeTools.makeCodecMeta(defn).joinN().shift(4).trim}
+ |
+ | @$pyClassMethod
+ | @$pyCache
+ | def instance (cls):
+ | return cls()
+ |""".stripMargin
+ }
+
+ private def genBranchDecoder(
+ name: PyType,
+ dto: Typedef.Dto,
+ ): Option[TextTree[PyValue]] = {
+ dto.id.owner match {
+ case Owner.Adt(_) if pyTarget.language.wrappedAdtBranchCodecs =>
+ val fieldsCodecs = fieldsOf(dto).map { case (enc, dec, _) => (enc, dec) }
+ Some(genDtoDecoder(name, fieldsCodecs, dto))
+ case _ => None
+ }
+ }
+
+ private def genEnumBodies(enum: Typedef.Enum): (TextTree[PyValue], TextTree[PyValue]) = {
+ val branches = enum.members.zipWithIndex.toList.map {
+ case (m, i) =>
+ (
+ q"""if value.value == "${m.name}":
+ | wire.write_byte(${i.toString})
+ | return
+ |""".stripMargin,
+ q"""if as_byte == ${i.toString}:
+ | return ${enum.id.name.name}.${m.name}
+ |""".stripMargin,
+ )
+ }
+
+ (
+ q"""${branches.map(_._1).joinN()}
+ |
+ |raise ValueError(f"Cannot encode {value} to ${enum.id.name.name}: no matching value")""".stripMargin,
+ q"""as_byte = wire.read_byte()
+ |
+ |${branches.map(_._2).joinN()}
+ |
+ |raise ValueError(f"Cannot decode {wire} to ${enum.id.name.name}: no matching value")""".stripMargin,
+ )
+ }
+
+ private def genAdtBodies(name: PyType, adt: Typedef.Adt): (TextTree[PyValue], TextTree[PyValue]) = {
+ val branches = adt.dataMembers(domain).zipWithIndex.map {
+ case (member, i) =>
+ val cName = codecType(member)
+ val encoder = {
+ if (pyTarget.language.wrappedAdtBranchCodecs) {
+ q"""if isinstance(value, ${member.name.name}):
+ | $cName.instance().encode(ctx, wire, value)
+ | return
+ |""".stripMargin
+ } else {
+ q"""if isinstance(value, ${member.name.name}):
+ | wire.write_byte(${i.toString})
+ | $cName.instance().encode(ctx, wire, value)
+ | return
+ |""".stripMargin
+ }
+ }
+ val decoder = {
+ if (pyTarget.language.wrappedAdtBranchCodecs) {
+ q"""if as_byte == ${i.toString}:
+ | return $cName.instance().decode_branch(ctx, wire)
+ |""".stripMargin
+ } else {
+ q"""if as_byte == ${i.toString}:
+ | return $cName.instance().decode(ctx, wire)
+ |""".stripMargin
+ }
+ }
+ (encoder, decoder)
+ }
+
+ (
+ q"""${branches.map(_._1).joinN()}
+ |
+ |raise ValueError(f"Cannot encode {value} to $name: no matching value")
+ |""".stripMargin,
+ q"""as_byte = wire.read_byte()
+ |
+ |${branches.map(_._2).joinN()}
+ |
+ |raise ValueError(f"Cannot decode {wire} to $name: no matching value")
+ |""".stripMargin,
+ )
+ }
+
+ private def genDtoDecoder(name: PyValue.PyType, fields: List[(TextTree[PyValue], TextTree[PyValue])], dto: Typedef.Dto): TextTree[PyValue] = {
+ val fieldsDecoders = dto.fields.zip(fields.map(_._2)).map { case (field, decoder) => q"${field.name.name}=$decoder" }
+ q"""index = self.read_index(ctx, wire)
+ |
+ |if ctx.use_indices:
+ | assert len(index) == self.index_elements_count(ctx)
+ |
+ |return ${name.name}(
+ | ${fieldsDecoders.join(",\n").shift(4).trim}
+ |)
+ |""".stripMargin
+ }
+
+ private def genDtoBodies(name: PyType, dto: Typedef.Dto): (TextTree[PyValue], TextTree[PyValue]) = {
+ def adtBranchIndex(id: TypeId.User) = {
+ domain.defs.meta
+ .nodes(id).asInstanceOf[DomainMember.User]
+ .defn.asInstanceOf[Typedef.Adt]
+ .dataMembers(domain)
+ .zipWithIndex.find(_._1 == dto.id).get._2
+ }
+
+ val fields = fieldsOf(dto)
+
+ val noIndex = Seq(
+ q"wire.write_byte(header)",
+ fields.map(_._1).joinN(),
+ ).filterNot(_.isEmpty).join("\n")
+
+ val fieldsEncoders =
+ q"""header = 0b0000000
+ |
+ |if ctx.use_indices:
+ | header = (header | 0b0000001) & 0xFF
+ | wire.write_byte(header)
+ | write_memory_stream = $pyBytesIO()
+ | fake_writer = $baboonLEDataOutputStream(write_memory_stream)
+ | ${fields.map(_._3).join("\n").shift(4).trim}
+ | write_memory_stream.flush()
+ | wire.write(write_memory_stream.getvalue())
+ |else:
+ | ${noIndex.shift(4).trim}
+ |""".stripMargin
+
+ val fieldsDecoders = genDtoDecoder(name, fields.map { case (a, b, _) => (a, b) }, dto)
+
+ val enc = dto.id.owner match {
+ case Owner.Adt(id) if pyTarget.language.wrappedAdtBranchCodecs =>
+ val idx = adtBranchIndex(id)
+ q"""wire.write_byte(${idx.toString})
+ |$fieldsEncoders""".stripMargin
+ case _ => fieldsEncoders
+ }
+
+ val dec = dto.id.owner match {
+ case Owner.Adt(id) if pyTarget.language.wrappedAdtBranchCodecs =>
+ val idx = adtBranchIndex(id)
+ q"""marker = wire.read_byte()
+ |assert marker == ${idx.toString}
+ |return self.decode_branch(ctx, wire)""".stripMargin
+ case _ => fieldsDecoders
+ }
+
+ (enc, dec)
+ }
+
+ private def fieldsOf(dto: Typedef.Dto): List[(TextTree[PyValue], TextTree[PyValue], TextTree[PyValue])] = {
+ dto.fields.map {
+ f =>
+ val fieldRef = q"value.${f.name.name}"
+ val encoder = mkEncoder(f.tpe, fieldRef, q"wire")
+ val fakeEnc = mkEncoder(f.tpe, fieldRef, q"fake_writer")
+ val dec = mkDecoder(f.tpe)
+
+ val w = domain.refMeta(f.tpe).len match {
+ case BinReprLen.Fixed(bytes) =>
+ q"""# ${f.toString}
+ |before = write_memory_stream.tell()
+ |${fakeEnc.trim}
+ |after = write_memory_stream.tell()
+ |length = after - before
+ |assert length == ${bytes.toString}
+ |""".stripMargin
+
+ case v: BinReprLen.Variable =>
+ val sanityChecks = v match {
+ case BinReprLen.Unknown() =>
+ q"assert after >= before, f\"Got after={after}, before={before}\""
+
+ case BinReprLen.Alternatives(variants) =>
+ q"assert length in {${variants.mkString(", ")}}, f\"Got length={length}\""
+
+ case BinReprLen.Range(min, max) =>
+ List(
+                Some(q"assert length >= ${min.toString}, f\"Got length={length}\""),
+                max.map(m => q"assert length <= ${m.toString}, f\"Got length={length}\""),
+ ).flatten.joinN()
+ }
+
+ q"""# ${f.toString}
+ |before = write_memory_stream.tell()
+ |wire.write_i32(before)
+ |${fakeEnc.trim}
+ |after = write_memory_stream.tell()
+ |length = after - before
+ |wire.write_i32(length)
+ |${sanityChecks.trim}
+ |""".stripMargin
+ }
+ (encoder, dec, w)
+ }
+ }
+
+ private def mkEncoder(tpe: TypeRef, ref: TextTree[PyValue], writerRef: TextTree[PyValue]): TextTree[PyValue] = {
+ tpe match {
+ case TypeRef.Scalar(id) =>
+ id match {
+ case s: TypeId.BuiltinScalar =>
+ s match {
+ case TypeId.Builtins.bit => q"$writerRef.write_bool($ref)"
+ case TypeId.Builtins.i08 => q"$writerRef.write_byte($ref)"
+ case TypeId.Builtins.i16 => q"$writerRef.write_i16($ref)"
+ case TypeId.Builtins.i32 => q"$writerRef.write_i32($ref)"
+ case TypeId.Builtins.i64 => q"$writerRef.write_i64($ref)"
+ case TypeId.Builtins.u08 => q"$writerRef.write_ubyte($ref)"
+ case TypeId.Builtins.u16 => q"$writerRef.write_u16($ref)"
+ case TypeId.Builtins.u32 => q"$writerRef.write_u32($ref)"
+ case TypeId.Builtins.u64 => q"$writerRef.write_u64($ref)"
+ case TypeId.Builtins.f32 => q"$writerRef.write_f32($ref)"
+ case TypeId.Builtins.f64 => q"$writerRef.write_f64($ref)"
+
+ case TypeId.Builtins.f128 => q"$writerRef.write_f128($ref)"
+ case TypeId.Builtins.str => q"$writerRef.write_str($ref)"
+
+ case TypeId.Builtins.uid => q"$writerRef.write_uuid($ref)"
+ case TypeId.Builtins.tsu => q"$writerRef.write_datetime($ref)"
+ case TypeId.Builtins.tso => q"$writerRef.write_datetime($ref)"
+
+ case TypeId.Builtins.bytes => q"$writerRef.write_bytes($ref)"
+
+ case o => throw new RuntimeException(s"BUG: Unexpected type: $o")
+ }
+ case u: TypeId.User =>
+ val target = codecType(u)
+ q"$target.instance().encode(ctx, $writerRef, $ref)"
+ }
+ case c: TypeRef.Constructor =>
+ c.id match {
+ case TypeId.Builtins.opt =>
+ q"$writerRef.write_optional($ref, lambda v: ${mkEncoder(c.args.head, q"v", writerRef)})"
+ case TypeId.Builtins.map =>
+ val keyEncoder = mkEncoder(c.args.head, q"v", writerRef)
+ val valueEncoder = mkEncoder(c.args.last, q"v", writerRef)
+ q"$writerRef.write_dict($ref, lambda v: $keyEncoder, lambda v: $valueEncoder)"
+ case TypeId.Builtins.lst =>
+ q"$writerRef.write_seq($ref, lambda v: ${mkEncoder(c.args.head, q"v", writerRef)})"
+ case TypeId.Builtins.set =>
+ q"$writerRef.write_seq($ref, lambda v: ${mkEncoder(c.args.head, q"v", writerRef)})"
+ case o =>
+ throw new RuntimeException(s"BUG: Unexpected type: $o")
+ }
+ }
+ }
+
+ private def mkDecoder(tpe: TypeRef): TextTree[PyValue] = {
+ tpe match {
+ case TypeRef.Scalar(id) =>
+ id match {
+ case s: TypeId.BuiltinScalar =>
+ s match {
+ case TypeId.Builtins.bit => q"wire.read_bool()"
+ case TypeId.Builtins.i08 => q"wire.read_byte()"
+ case TypeId.Builtins.i16 => q"wire.read_i16()"
+ case TypeId.Builtins.i32 => q"wire.read_i32()"
+ case TypeId.Builtins.i64 => q"wire.read_i64()"
+ case TypeId.Builtins.u08 => q"wire.read_ubyte()"
+ case TypeId.Builtins.u16 => q"wire.read_u16()"
+ case TypeId.Builtins.u32 => q"wire.read_u32()"
+ case TypeId.Builtins.u64 => q"wire.read_u64()"
+ case TypeId.Builtins.f32 => q"wire.read_f32()"
+ case TypeId.Builtins.f64 => q"wire.read_f64()"
+
+ case TypeId.Builtins.f128 => q"wire.read_f128()"
+ case TypeId.Builtins.str => q"wire.read_str()"
+
+ case TypeId.Builtins.uid => q"wire.read_uuid()"
+ case TypeId.Builtins.tsu => q"wire.read_datetime()"
+ case TypeId.Builtins.tso => q"wire.read_datetime()"
+
+ case TypeId.Builtins.bytes => q"wire.read_bytes()"
+
+ case o => throw new RuntimeException(s"BUG: Unexpected type: $o")
+ }
+ case u: TypeId.User => q"${codecType(u)}.instance().decode(ctx, wire)"
+ }
+ case c: TypeRef.Constructor =>
+ c.id match {
+ case TypeId.Builtins.opt =>
+ q"None if wire.read_byte() == 0 else ${mkDecoder(c.args.head)}"
+ case TypeId.Builtins.map =>
+ val keyDecoder = mkDecoder(c.args.head)
+ val valueDecoder = mkDecoder(c.args.last)
+ q"{$keyDecoder: $valueDecoder for _ in range(wire.read_i32())}"
+ case TypeId.Builtins.lst =>
+ q"[${mkDecoder(c.args.head)} for _ in range(wire.read_i32())]"
+ case TypeId.Builtins.set =>
+ q"{${mkDecoder(c.args.head)} for _ in range(wire.read_i32())}"
+ case o =>
+ throw new RuntimeException(s"BUG: Unexpected type: $o")
+ }
+ }
+ }
+
+ private def genForeignTypesBodies(name: PyType): (TextTree[PyValue], TextTree[PyValue]) = {
+ (
+ q"""raise ValueError(f"${name.name} is a foreign type")""",
+ q"""raise ValueError(f"${name.name} is a foreign type")""",
+ )
+ }
+
+ override def codecType(tid: TypeId.User): PyType = {
+ val typeName = s"${tid.name.name}_UEBACodec"
+ val moduleId = typeTranslator
+ .toPyModule(tid, domain.version, evolution, pyFileTools.definitionsBasePkg)
+ PyType(moduleId, typeName)
+ }
+
+ override def codecMeta(tid: TypeId.User): PyCodecTranslator.CodecMeta = {
+ val meta = q"""@$pyStaticMethod
+ |def codec_ueba():
+ | return ${codecType(tid)}.instance()""".stripMargin
+ PyCodecTranslator.CodecMeta(meta)
+ }
+
+ override def isActive(id: TypeId): Boolean = {
+ pyTarget.language.generateUebaCodecs && (pyTarget.language.generateUebaCodecsByDefault || domain.derivationRequests
+ .getOrElse(DerivationDecl("ueba"), Set.empty[TypeId]).contains(id))
+ }
+
+ override def id: String = "ueba"
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyValue.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyValue.scala
new file mode 100644
index 00000000..2f29f7d2
--- /dev/null
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/python/PyValue.scala
@@ -0,0 +1,24 @@
+package io.septimalmind.baboon.translator.python
+
+import io.septimalmind.baboon.typer.model.Version
+import izumi.fundamentals.collections.nonempty.NEList
+
+sealed trait PyValue
+object PyValue {
+ final case class PyModuleId(path: NEList[String], version: Option[Version] = None) {
+ val module: String = path.last
+ val moduleVersionString: Option[String] = version.map(v => v.format(prefix = "v", delimiter = "_"))
+ val pathToVersion: List[String] = path.toList.takeWhile(p => !moduleVersionString.contains(p))
+ def withModuleName(name: String): PyModuleId = {
+ this.copy(path = NEList.unsafeFrom(path.toList.init :+ name))
+ }
+ val isBaboonModule: Boolean = path.head.startsWith("baboon")
+ }
+ object PyModuleId {
+ def apply(module: String): PyModuleId = {
+ new PyModuleId(NEList.unsafeFrom(List(module)), None)
+ }
+ }
+
+ final case class PyType(moduleId: PyModuleId, name: String, versioned: Boolean = false) extends PyValue
+}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/scl/ScBaboonTranslator.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/scl/ScBaboonTranslator.scala
index 0262e58b..ef239cac 100644
--- a/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/scl/ScBaboonTranslator.scala
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/translator/scl/ScBaboonTranslator.scala
@@ -119,7 +119,6 @@ class ScBaboonTranslator[F[+_, +_]: Error2](
q"""unmodified.put("${tid.toString}", $scList(${version.sameIn.map(_.v.toString).map(s => q"\"$s\"").toList.join(", ")}))"""
}
- scala.collection.mutable.Map.empty[String, String]
val metaTree =
q"""object BaboonMetadata extends $baboonMeta {
| private val unmodified = ${scMutMap.fullyQualified}.empty[$scString, $scList[$scString]]
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/BaboonEnquiries.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/BaboonEnquiries.scala
index c2ae5e3e..352948a6 100644
--- a/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/BaboonEnquiries.scala
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/BaboonEnquiries.scala
@@ -29,6 +29,8 @@ trait BaboonEnquiries {
def isEnum(tpe: TypeRef, domain: Domain): Boolean
def unfold(dom: Domain, contracts: List[TypeId.User]): List[Field]
+
+ def collectParents(domain: Domain, definitions: List[DomainMember.User]): List[TypeId.User]
}
object BaboonEnquiries {
@@ -416,6 +418,40 @@ object BaboonEnquiries {
}
}
+ override def collectParents(domain: Domain, definitions: List[DomainMember.User]): List[TypeId.User] = {
+ def collectUserDefinitions(ids: List[TypeId.User]): List[DomainMember.User] = {
+ ids.flatMap(domain.defs.meta.nodes.get).collect { case u: DomainMember.User => u }
+ }
+
+ @tailrec
+ def loop(acc: List[TypeId.User], toProcess: List[DomainMember.User]): List[TypeId.User] = {
+ toProcess match {
+ case Nil => acc
+ case head :: tail =>
+ head.defn match {
+ case d: Typedef.Dto =>
+ val adtParent = d.id.owner match {
+ case Owner.Adt(id) => collectUserDefinitions(List(id))
+ case _ => List.empty
+ }
+ val contractsDefs = collectUserDefinitions(d.contracts)
+ val all = adtParent.map(_.id) ++ contractsDefs.map(_.id)
+ loop(acc ++ all, tail ++ adtParent ++ contractsDefs)
+
+ case adt: Typedef.Adt =>
+ val contractsDefs = collectUserDefinitions(adt.contracts)
+ loop(acc ++ contractsDefs.map(_.id), tail ++ contractsDefs)
+
+ case c: Typedef.Contract =>
+ val contractsDefs = collectUserDefinitions(c.contracts)
+ loop(acc ++ contractsDefs.map(_.id), tail ++ contractsDefs)
+
+ case _ => loop(acc, tail)
+ }
+ }
+ }
+ loop(Nil, definitions)
+ }
}
}
diff --git a/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/model/Version.scala b/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/model/Version.scala
index f29e678c..17fa10cf 100644
--- a/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/model/Version.scala
+++ b/baboon-compiler/src/main/scala/io/septimalmind/baboon/typer/model/Version.scala
@@ -16,6 +16,10 @@ case class Version(v: izumi.fundamentals.platform.versions.Version) {
def <=(o: Version)(implicit ord: Ordering[izumi.fundamentals.platform.versions.Version]): Boolean = !(this > o)
def >=(o: Version)(implicit ord: Ordering[izumi.fundamentals.platform.versions.Version]): Boolean = !(this < o)
+
+ def format(prefix: String = "", delimiter: String): String = {
+ s"$prefix${toString.replace(".", delimiter)}"
+ }
}
object Version {
diff --git a/baboon-compiler/src/test/resources/baboon/pkg0/pkg01.baboon b/baboon-compiler/src/test/resources/baboon/pkg0/pkg01.baboon
index 14472828..d497a574 100644
--- a/baboon-compiler/src/test/resources/baboon/pkg0/pkg01.baboon
+++ b/baboon-compiler/src/test/resources/baboon/pkg0/pkg01.baboon
@@ -5,11 +5,13 @@ version "1.0.0"
foreign ObscureInt {
cs = "System.Int32"
scala = "java.lang.Integer"
+ py = "builtins.int"
}
foreign ForeignStruct {
cs = "BaboonDefinitions.Foreign.TestForeignStruct" with { "value-type" = "yes" }
scala = "java.lang.Integer"
+ py = "typing.NamedTuple"
}
diff --git a/baboon-compiler/src/test/resources/baboon/pkg0/pkg02.baboon b/baboon-compiler/src/test/resources/baboon/pkg0/pkg02.baboon
index 0168d306..94a39a99 100644
--- a/baboon-compiler/src/test/resources/baboon/pkg0/pkg02.baboon
+++ b/baboon-compiler/src/test/resources/baboon/pkg0/pkg02.baboon
@@ -14,6 +14,7 @@ enum T1_E1 {
foreign ObscureInt {
cs = "System.Int32"
scala = "java.lang.Integer"
+ py = "builtins.int"
}
root data T1_E1_RET {
diff --git a/baboon-compiler/src/test/resources/baboon/pkg0/pkg03.baboon b/baboon-compiler/src/test/resources/baboon/pkg0/pkg03.baboon
index ce4b3bea..ef5d5a08 100644
--- a/baboon-compiler/src/test/resources/baboon/pkg0/pkg03.baboon
+++ b/baboon-compiler/src/test/resources/baboon/pkg0/pkg03.baboon
@@ -277,16 +277,16 @@ root service I1 {
def testCall (
data in {
}
-
+
data out {
i00: i32
}
-
+
data err {
msg: str
}
)
-
+
def testCall2 (
in = T7_Empty
out = T7_Empty
diff --git a/run b/run
new file mode 100755
index 00000000..e69de29b
diff --git a/test/conv-test-cs/ConvTest/Test_CrossLanguageCompat.cs b/test/conv-test-cs/ConvTest/Test_CrossLanguageCompat.cs
index d4b75c22..b02e14de 100644
--- a/test/conv-test-cs/ConvTest/Test_CrossLanguageCompat.cs
+++ b/test/conv-test-cs/ConvTest/Test_CrossLanguageCompat.cs
@@ -5,13 +5,16 @@
using Baboon.Runtime.Shared;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
+using System.Linq;
namespace ConvTest
{
[TestFixture]
public class Test_CrossLanguageCompat
{
- private readonly string baseDir = Path.GetFullPath(Path.Combine("..", "..", "..", "..", "..", "..", "target", "compat-test"));
+ private readonly string baseDir =
+ Path.GetFullPath(Path.Combine("..", "..", "..", "..", "..", "..", "target", "compat-test"));
+
private readonly BaboonCodecContext ctx = BaboonCodecContext.Default;
// Helper methods
@@ -19,7 +22,8 @@ private AllBasicTypes ReadJsonFile(string source, string format)
{
var file = Path.Combine(baseDir, $"{source}-json", "all-basic-types.json");
var jsonStr = File.ReadAllText(file, Encoding.UTF8);
- using var reader = new JsonTextReader(new StringReader(jsonStr)) { DateParseHandling = DateParseHandling.None };
+ using var reader = new JsonTextReader(new StringReader(jsonStr))
+ { DateParseHandling = DateParseHandling.None };
var jsonToken = JToken.Load(reader);
return AllBasicTypes_JsonCodec.Instance.Decode(ctx, jsonToken);
}
@@ -41,15 +45,15 @@ private void AssertBasicFields(AllBasicTypes data, string label)
Assert.That(data.Vbit, Is.True);
}
- private void PrintComparison(string label, AllBasicTypes scalaData, AllBasicTypes csData)
+ private void PrintComparison(string label, string lang, AllBasicTypes langData, AllBasicTypes csData)
{
Console.WriteLine($"Comparing Scala and C# {label} data:");
- Console.WriteLine($" Scala: vi8={scalaData.Vi8}, vi16={scalaData.Vi16}, vi32={scalaData.Vi32}, vi64={scalaData.Vi64}");
- Console.WriteLine($" C#: vi8={csData.Vi8}, vi16={csData.Vi16}, vi32={csData.Vi32}, vi64={csData.Vi64}");
- Console.WriteLine($" Scala: vf32={scalaData.Vf32}, vf64={scalaData.Vf64}, vf128={scalaData.Vf128}");
- Console.WriteLine($" C#: vf32={csData.Vf32}, vf64={csData.Vf64}, vf128={csData.Vf128}");
- Console.WriteLine($" Scala: vtsu={scalaData.Vtsu}, vtso={scalaData.Vtso}");
- Console.WriteLine($" C#: vtsu={csData.Vtsu}, vtso={csData.Vtso}");
+ Console.WriteLine($" {lang}: vi8={langData.Vi8}, vi16={langData.Vi16}, vi32={langData.Vi32}, vi64={langData.Vi64}");
+ Console.WriteLine($" C#: vi8={csData.Vi8}, vi16={csData.Vi16}, vi32={csData.Vi32}, vi64={csData.Vi64}");
+ Console.WriteLine($" {lang}: vf32={langData.Vf32}, vf64={langData.Vf64}, vf128={langData.Vf128}");
+ Console.WriteLine($" C#: vf32={csData.Vf32}, vf64={csData.Vf64}, vf128={csData.Vf128}");
+ Console.WriteLine($" {lang}: vtsu={langData.Vtsu}, vtso={langData.Vtso}");
+ Console.WriteLine($" C#: vtsu={csData.Vtsu}, vtso={csData.Vtso}");
}
// JSON Tests
@@ -65,6 +69,12 @@ public void CSharp_JSON_Deserialization_Should_Read_CSharp_Generated_JSON()
AssertBasicFields(ReadJsonFile("cs", "C# JSON"), "C# JSON");
}
+ [Test]
+ public void CSharp_JSON_Deserialization_Should_Read_Python_Generated_JSON()
+ {
+        AssertBasicFields(ReadJsonFile("python", "Python JSON"), "Python JSON");
+ }
+
// UEBA Tests
[Test]
public void CSharp_UEBA_Deserialization_Should_Read_Scala_Generated_UEBA()
@@ -78,13 +88,20 @@ public void CSharp_UEBA_Deserialization_Should_Read_CSharp_Generated_UEBA()
AssertBasicFields(ReadUebaFile("cs", "C# UEBA"), "C# UEBA");
}
+ [Test]
+ public void CSharp_UEBA_Deserialization_Should_Read_Python_Generated_UEBA()
+ {
+        AssertBasicFields(ReadUebaFile("python", "Python UEBA"), "Python UEBA");
+ }
+
+
// Cross-language comparison
[Test]
public void CrossLanguage_Comparison_Should_Verify_Scala_And_CSharp_JSON_Produce_Equivalent_Data()
{
var scalaData = ReadJsonFile("scala", "Scala JSON");
var csData = ReadJsonFile("cs", "C# JSON");
- PrintComparison("JSON", scalaData, csData);
+ PrintComparison("JSON", "Scala", scalaData, csData);
Assert.That(csData, Is.EqualTo(scalaData), "Scala and C# JSON data should be equal");
}
@@ -93,8 +110,26 @@ public void CrossLanguage_Comparison_Should_Verify_Scala_And_CSharp_UEBA_Produce
{
var scalaData = ReadUebaFile("scala", "Scala UEBA");
var csData = ReadUebaFile("cs", "C# UEBA");
- PrintComparison("UEBA", scalaData, csData);
+ PrintComparison("UEBA", "Scala", scalaData, csData);
Assert.That(csData, Is.EqualTo(scalaData), "Scala and C# UEBA data should be equal");
}
+
+ [Test]
+ public void CrossLanguage_Comparison_Should_Verify_Python_And_CSharp_JSON_Produce_Equivalent_Data()
+ {
+ var csData = ReadJsonFile("cs", "C# JSON");
+ var pythonData = ReadJsonFile("python", "python JSON");
+ PrintComparison("JSON", "Python", pythonData, csData);
+ Assert.That(csData, Is.EqualTo(pythonData), "Python and C# JSON data should be equal");
+ }
+
+ [Test]
+ public void CrossLanguage_Comparison_Should_Verify_Python_And_CSharp_UEBA_Produce_Equivalent_Data()
+ {
+ var pythonData = ReadUebaFile("python", "python UEBA");
+ var csData = ReadUebaFile("cs", "C# UEBA");
+ PrintComparison("UEBA", "python", pythonData, csData);
+ Assert.That(csData, Is.EqualTo(pythonData), "python and C# UEBA data should be equal");
+ }
}
-}
+}
\ No newline at end of file
diff --git a/test/conv-test-py/compat_main.py b/test/conv-test-py/compat_main.py
new file mode 100644
index 00000000..7e35a52d
--- /dev/null
+++ b/test/conv-test-py/compat_main.py
@@ -0,0 +1,72 @@
+import io
+from datetime import datetime, timezone, timedelta
+
+from decimal import Decimal
+from pathlib import Path
+from uuid import UUID
+
+from Generated.convtest.testpkg.AllBasicTypes import AllBasicTypes, AllBasicTypes_JsonCodec, AllBasicTypes_UEBACodec
+from Generated.baboon_codecs import BaboonCodecContext, LEDataInputStream, LEDataOutputStream
+
+
+def create_sample_data():
+ return AllBasicTypes(
+ vi8=42,
+ vi16=1234,
+ vi32=123456,
+ vi64=123456789,
+ vu8=200,
+ vu16=50000,
+ vu32=3000000000,
+ vu64=10000000000,
+ vf32=3.14159,
+ vf64=2.718281828,
+ vf128=Decimal("123456789.987654321"),
+ vstr="Hello, Baboon!",
+ vbstr=b'Hello Bytes',
+ vuid=UUID("12345678-1234-5678-1234-567812345678"),
+ vbit=True,
+ vtsu=datetime(2024, 6, 15, 12, 30, 45, 123456, tzinfo=timezone.utc),
+ vtso=datetime(2024, 6, 15, 14, 30, 45, 987654, tzinfo=timezone(timedelta(hours=2))),
+ voptStr="optional value",
+ vlstI32=[1, 2, 3, 4, 5],
+ vsetStr={"apple", "banana", "cherry"},
+ vmapStrI32={"one": 1, "two": 2, "three": 3},
+ voptLst=["nested", "list", "values"],
+ vlstOpt=[10, None, 20, 30],
+ vmapLst={"numbers": [1, 2, 3], "more": [4, 5, 6]},
+ )
+
+
+# create sample data with all basic types
+sample_data = create_sample_data()
+
+# Create output directories - use absolute path relative to project root
+base_dir = Path("../../target/compat-test").resolve()
+json_dir = base_dir / "python-json"
+ueba_dir = base_dir / "python-ueba"
+
+json_dir.mkdir(parents=True, exist_ok=True)
+ueba_dir.mkdir(parents=True, exist_ok=True)
+
+# Serialize to JSON
+json_str = AllBasicTypes_JsonCodec.instance().encode(sample_data)
+json_file_path = json_dir / "all-basic-types.json"
+with open(json_file_path, "w", encoding="utf-8") as f:
+ f.write(json_str)
+
+print(f"Written JSON to {json_file_path}")
+
+# Serialize to UEBA
+memory_stream = io.BytesIO()
+ueba_writer = LEDataOutputStream(memory_stream)
+AllBasicTypes_UEBACodec.instance().encode(BaboonCodecContext.default(), ueba_writer, sample_data)
+ueba_bytes = memory_stream.getvalue()
+
+ueba_file_path = ueba_dir / "all-basic-types.ueba"
+with open(ueba_file_path, "wb") as f:
+ f.write(ueba_bytes)
+
+print(f"Written UEBA to {ueba_file_path}")
+
+print("Python serialization complete!")
diff --git a/test/conv-test-py/requirements.txt b/test/conv-test-py/requirements.txt
new file mode 100644
index 00000000..46857f69
--- /dev/null
+++ b/test/conv-test-py/requirements.txt
@@ -0,0 +1 @@
+pydantic==2.12.5
\ No newline at end of file
diff --git a/test/conv-test-py/test_conversions.py b/test/conv-test-py/test_conversions.py
new file mode 100644
index 00000000..2313a48a
--- /dev/null
+++ b/test/conv-test-py/test_conversions.py
@@ -0,0 +1,19 @@
+from unittest import TestCase
+
+from Generated.convtest.testpkg import v1_0_0
+from Generated.convtest.testpkg.Adt0 import B1
+from Generated.convtest.testpkg import Adt0
+from Generated.convtest.testpkg.baboon_runtime import BaboonConversions, RequiredConversions
+
+class TestConversions(TestCase):
+ def test_derived_conversion_adt_autoupgrade(self):
+ b1 = v1_0_0.Adt0.B1(f="value")
+ conv = BaboonConversions(required=RequiredConversions())
+
+ converted = conv.convert_with_context(conv, b1, v1_0_0.Adt0.B1, B1)
+
+ self.assertEqual(b1.f, converted.f)
+
+ converted2 = conv.convert_with_context(conv, b1, v1_0_0.Adt0.Adt0, Adt0.Adt0)
+
+ self.assertEqual(converted, converted2)
diff --git a/test/cs-stub/BaboonDefinitions/BaboonDefinitions.csproj b/test/cs-stub/BaboonDefinitions/BaboonDefinitions.csproj
index be84dee4..c242b0c9 100644
--- a/test/cs-stub/BaboonDefinitions/BaboonDefinitions.csproj
+++ b/test/cs-stub/BaboonDefinitions/BaboonDefinitions.csproj
@@ -1,4 +1,4 @@
-
+
netstandard2.1
@@ -12,4 +12,4 @@
-
+
\ No newline at end of file
diff --git a/test/cs-stub/BaboonTests/BaboonTests.csproj b/test/cs-stub/BaboonTests/BaboonTests.csproj
index 937bd63d..d4afed2f 100644
--- a/test/cs-stub/BaboonTests/BaboonTests.csproj
+++ b/test/cs-stub/BaboonTests/BaboonTests.csproj
@@ -18,4 +18,4 @@
-
+
\ No newline at end of file
diff --git a/test/cs-stub/ConversionsTest.sln b/test/cs-stub/ConversionsTest.sln
index ae198619..a58e61da 100644
--- a/test/cs-stub/ConversionsTest.sln
+++ b/test/cs-stub/ConversionsTest.sln
@@ -1,4 +1,3 @@
-
Microsoft Visual Studio Solution File, Format Version 12.00
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BaboonTests", "BaboonTests\BaboonTests.csproj", "{5DFBB7B1-5992-44B3-B75E-C228BB133718}"
EndProject
@@ -19,4 +18,4 @@ Global
{2A158F54-564F-4130-894D-9665B70BF3EC}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2A158F54-564F-4130-894D-9665B70BF3EC}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
-EndGlobal
+EndGlobal
\ No newline at end of file
diff --git a/test/py-stub/requirements.txt b/test/py-stub/requirements.txt
new file mode 100644
index 00000000..46857f69
--- /dev/null
+++ b/test/py-stub/requirements.txt
@@ -0,0 +1 @@
+pydantic==2.12.5
\ No newline at end of file