diff --git a/tools/log-converter/log_converter_gemini.py b/tools/log-converter/log_converter_gemini.py
new file mode 100644
index 0000000000000000000000000000000000000000..5da4bc9f2be5e8760668c6479f823e3b0b92863f
--- /dev/null
+++ b/tools/log-converter/log_converter_gemini.py
@@ -0,0 +1,181 @@
+import os
+from pathlib import Path
+
+import click
+import polars as pl
+
+MAIN_FLYING_FMM_STATE = 10
+SECONDS_BEFORE_FLYING = 15
+
+
+@click.command()
+@click.argument(
+    "path",
+    type=click.Path(exists=True),
+    default="../../../antenna-tracking/gemini-flight-logs/RoccarasoFlight/data/",
+)
+@click.option("--output", type=click.Path(), default=".")
+def main(path: str, output: str):
+    """
+    Convert Gemini MAIN flight logs under PATH into ARPIST-compliant low_rate.csv / high_rate.csv in --output.
+    """
+    # locate the required CSVs (NAS controller/state, FMM status, GPS, ADA) inside any MAIN directory under `path`
+    nas_controller_status_path = None
+    nas_state_path = None
+    fmm_status_path = None
+    gps_path = None
+    ada_state_path = None
+
+    for p, _, fn in os.walk(path):
+        p = Path(p)
+        if p.name != "MAIN":
+            continue
+        for f in fn:
+            if f == "Main_NASControllerStatus.csv":
+                nas_controller_status_path = os.path.join(p, f)
+            elif f == "Main_FlightModeManagerStatus.csv":
+                fmm_status_path = os.path.join(p, f)
+            elif f == "Boardcore_NASState.csv":
+                nas_state_path = os.path.join(p, f)
+            elif f == "Boardcore_UBXGPSData.csv":
+                gps_path = os.path.join(p, f)
+            elif f == "Boardcore_ADAState.csv":
+                ada_state_path = os.path.join(p, f)
+
+    if nas_controller_status_path is None:
+        raise FileNotFoundError("NAS controller status file not found")
+    if nas_state_path is None:
+        raise FileNotFoundError("NAS state file not found")
+    if fmm_status_path is None:
+        raise FileNotFoundError("FMM status file not found")
+    if gps_path is None:
+        raise FileNotFoundError("GPS data file not found")
+    if ada_state_path is None:
+        raise FileNotFoundError("ADA state file not found")
+
+    nas_controller_status = pl.read_csv(nas_controller_status_path)
+    nas_state = pl.read_csv(nas_state_path)
+    fmm_status = pl.read_csv(fmm_status_path)
+    gps_data = pl.read_csv(gps_path, infer_schema_length=10000)
+    ada_state = pl.read_csv(ada_state_path)
+
+    # the earliest NAS controller status timestamp is taken as the calibrate-event instant
+    nas_controller_status = nas_controller_status.sort("timestamp")
+    calibrate_tms = nas_controller_status.select("timestamp")[0, 0]
+
+    # normalize GPS data: gpsTimestamp (epoch microseconds) -> datetime, keep lat/lon only
+    gps_data = gps_data.select(
+        pl.from_epoch(pl.col("gpsTimestamp"), time_unit="us").alias("timestamp"),
+        "latitude",
+        "longitude",
+    ).sort("timestamp")
+
+    (lat, lon) = (
+        gps_data.filter(pl.col("timestamp") >= calibrate_tms).head(1).to_numpy()[0, 1:]
+    )
+
+    # normalize ADA state: epoch-microsecond timestamp plus MSL altitude
+    ada_state = ada_state.select(
+        pl.from_epoch(pl.col("timestamp"), time_unit="us"),
+        pl.col("mslAltitude").alias("altitude"),
+    ).sort("timestamp")
+
+    alt = (
+        ada_state.filter(pl.col("timestamp") >= calibrate_tms).head(1).to_numpy()[0, 1]
+    )
+
+    # synthesize a reference-values frame from the first GPS/ADA samples at/after calibration
+    reference_values = pl.DataFrame(
+        {
+            "timestamp": [calibrate_tms],
+            "refLatitude": [lat],
+            "refLongitude": [lon],
+            "refAltitude": [alt],
+        }
+    )
+
+    # convert epoch-us timestamps to datetimes and keep only the columns ARPIST needs
+    reference_values = reference_values.select(
+        pl.from_epoch(pl.col("timestamp"), time_unit="us"),
+        pl.col("refLatitude").alias("latitude"),
+        pl.col("refLongitude").alias("longitude"),
+        pl.col("refAltitude").alias("altitude"),
+    ).sort("timestamp")
+    nas_state = nas_state.select(
+        pl.from_epoch(pl.col("timestamp"), time_unit="us"),
+        "n",
+        "e",
+        "d",
+        "vn",
+        "ve",
+        "vd",
+    ).sort("timestamp")
+    fmm_status = fmm_status.select(
+        pl.from_epoch(pl.col("timestamp"), time_unit="us"), "state"
+    ).sort("timestamp")
+
+    # stop ~10 s after the 1 s-averaged NAS "down" coordinate first goes positive (landing heuristic)
+    last_fmm_state_ts = fmm_status.select("timestamp").tail(1)[0, 0]
+    stop_ts = (
+        nas_state.filter(pl.col("timestamp") >= last_fmm_state_ts)
+        .group_by_dynamic("timestamp", every="1s")
+        .agg(pl.col("d").mean())
+        .filter(pl.col("d") > 0)  # fitted on gemini data
+        .head(1)[0, 0]
+    ) + pl.duration(seconds=10)
+
+    # filter the reference values and nas state dataframes
+    reference_values = reference_values.filter(pl.col("timestamp") <= stop_ts)
+    nas_state = nas_state.filter(pl.col("timestamp") <= stop_ts)
+
+    # find max timestamp
+    max_ts = max(
+        reference_values.select("timestamp").max().item(0, 0),
+        (nas_state.select("timestamp").max().item(0, 0)),
+    )
+
+    # resample both streams: low-rate at 500 ms, high-rate at 250 ms (last sample wins, gaps forward-filled)
+    last_row = reference_values.tail(1)
+    last_row[0, "timestamp"] = max_ts
+    reference_values = pl.concat([reference_values, last_row], how="vertical")
+    reference_values = (
+        reference_values.group_by_dynamic(pl.col("timestamp"), every="500ms")
+        .agg(pl.all().last())
+        .upsample(time_column="timestamp", every="500ms")
+        .fill_null(strategy="forward")
+    )
+    nas_state = (
+        nas_state.group_by_dynamic(pl.col("timestamp"), every="250ms")
+        .agg(pl.all().last())
+        .upsample(time_column="timestamp", every="250ms")
+        .fill_null(strategy="forward")
+    )
+
+    # keep only data from SECONDS_BEFORE_FLYING seconds before the first FLYING FMM state
+    start_ts = fmm_status.filter(pl.col("state") == MAIN_FLYING_FMM_STATE).select(
+        "timestamp"
+    )[0, 0] - pl.duration(seconds=SECONDS_BEFORE_FLYING)
+    reference_values = reference_values.filter(pl.col("timestamp") >= start_ts)
+    nas_state = nas_state.filter(pl.col("timestamp") >= start_ts)
+
+    # write the ARPIST CSVs with integer epoch-microsecond timestamps
+    output = Path(output)
+    reference_values.select(
+        pl.col("timestamp").dt.timestamp(time_unit="us"),
+        "latitude",
+        "longitude",
+        "altitude",
+    ).write_csv(output / "low_rate.csv")
+    nas_state.select(
+        pl.col("timestamp").dt.timestamp(time_unit="us"),
+        "n",
+        "e",
+        "d",
+        "vn",
+        "ve",
+        "vd",
+    ).write_csv(output / "high_rate.csv")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/log-converter/log_converter.py b/tools/log-converter/log_converter_lyra.py
similarity index 100%
rename from tools/log-converter/log_converter.py
rename to tools/log-converter/log_converter_lyra.py
diff --git a/tools/log-converter/pyproject.toml b/tools/log-converter/pyproject.toml
index a13734d7320fae194d1080b010ad246229480aff..2f5af910d22e30992bea806d967102f9eb3b5d76 100644
--- a/tools/log-converter/pyproject.toml
+++ b/tools/log-converter/pyproject.toml
@@ -4,11 +4,4 @@ version = "0.1.0"
 description = "Add your description here"
 readme = "README.md"
 requires-python = ">=3.12"
-dependencies = [
-    "click>=8.1.7",
-    "numpy>=2.1.1",
-    "polars>=1.8.2",
-]
-
-[project.scripts]
-log-converter = "log_converter:main"
+dependencies = ["click>=8.1.7", "numpy>=2.1.1", "polars>=1.8.2"]
diff --git a/tools/log-converter/requirements.txt b/tools/log-converter/requirements.txt
index f348166d39341e8acfe970a09effe94f2730c0ad..1cedce197c779b0fbaeab5c79e07be575b698a1c 100644
--- a/tools/log-converter/requirements.txt
+++ b/tools/log-converter/requirements.txt
@@ -1,2 +1,3 @@
 click==8.1.7
+numpy==2.1.1
 polars==1.8.2