Skip to content

fastflowtransform.executors.base

BaseExecutor

Bases: ABC

Shared workflow for SQL rendering and Python models. I/O is frame-agnostic; subclasses provide the frame-specific hooks `_read_relation`, `_materialize_relation`, `_validate_required`, `_columns_of`, `_is_frame`, and (optionally) `_frame_name`.

Source code in src/fastflowtransform/executors/base.py
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
class BaseExecutor[TFrame](ABC):
    """
    Shared workflow for SQL rendering and Python models.
    I/O is frame-agnostic; subclasses provide frame-specific hooks:
      - _read_relation
      - _materialize_relation
      - _validate_required
      - _columns_of
      - _is_frame
      - (optional) _frame_name
    """

    # ---------- SQL ----------
    def render_sql(
        self,
        node: Node,
        env: Environment,
        ref_resolver: Callable[[str], str] | None = None,
        source_resolver: Callable[[str, str], str] | None = None,
    ) -> str:
        # ---- thread-/task-local config()-hook
        _RENDER_CFG: contextvars.ContextVar[dict[str, Any] | None] = contextvars.ContextVar(
            "_RENDER_CFG", default=None
        )

        def get_render_cfg() -> dict[str, Any]:
            cfg = _RENDER_CFG.get()
            if cfg is None:
                cfg = {}
                _RENDER_CFG.set(cfg)
            return cfg

        def _config_hook(**kwargs: Any) -> str:
            cfg = get_render_cfg()  # garantiert ein Dict
            cfg.update(kwargs)  # gleiche Referenz, kein erneutes set() nötig
            return ""  # nichts in SQL emittieren

        if "config" not in env.globals:
            env.globals["config"] = _config_hook

        # ---- var() builtin: CLI overrides > project.yml vars > default
        if "var" not in env.globals:

            def _var(key: str, default: Any = None) -> Any:
                cli = getattr(REGISTRY, "cli_vars", {}) or {}
                if key in cli:
                    return cli[key]
                proj = getattr(REGISTRY, "project_vars", {}) or {}
                if key in proj:
                    return proj[key]
                return default

            env.globals["var"] = _var

        # ---- is_incremental() builtin
        # True iff materialization is 'incremental' AND the target relation already exists.
        if "is_incremental" not in env.globals:

            def _is_incremental() -> bool:
                try:
                    mat = (getattr(node, "meta", {}) or {}).get("materialized", "table")
                    if mat != "incremental":
                        return False
                    rel = relation_for(node.name)
                    return bool(self.exists_relation(rel))
                except Exception:
                    # Be conservative: if anything is off, treat as non-incremental.
                    return False

            env.globals["is_incremental"] = _is_incremental

        raw = Path(node.path).read_text(encoding="utf-8")
        tmpl = env.from_string(raw)

        def _default_ref(name: str) -> str:
            return relation_for(name)

        def _default_source(source_name: str, table_name: str) -> str:
            group = REGISTRY.sources.get(source_name)
            if not group:
                raise KeyError(f"Unknown source {source_name}.{table_name}")
            entry = group.get(table_name)
            if not entry:
                raise KeyError(f"Unknown source {source_name}.{table_name}")
            cfg = resolve_source_entry(entry, self.engine_name, default_identifier=table_name)
            if cfg.get("location"):
                raise KeyError(
                    "Path-based sources require executor context; "
                    "default resolver cannot handle them."
                )
            identifier = cfg.get("identifier")
            if not identifier:
                raise KeyError(f"Source {source_name}.{table_name} missing identifier")
            return identifier

        _RENDER_CFG.set({})

        # expose 'this' to the template: Proxy-Objekt, das wie String wirkt
        this_obj = _ThisProxy(
            relation_for(node.name),
            (getattr(node, "meta", {}) or {}).get("materialized", "table"),
            getattr(self, "schema", None) or getattr(self, "dataset", None),
            getattr(self, "database", None) or getattr(self, "project", None),
        )

        sql = tmpl.render(
            ref=ref_resolver or _default_ref,
            source=source_resolver or _default_source,
            this=this_obj,
        )

        cfg = _RENDER_CFG.get()
        if cfg:
            for k, v in cfg.items():
                node.meta.setdefault(k, v)
        return sql

    def run_sql(self, node: Node, env: Environment) -> None:
        """
        Orchestrate SQL models:
          1) Render Jinja (ref/source/this) and strip leading {{ config(...) }}.
          2) If the SQL is full DDL (CREATE …), execute it verbatim (passthrough).
          3) Otherwise, normalize to CREATE OR REPLACE {TABLE|VIEW} AS <body>.
             The body is CTE-aware (keeps WITH … SELECT … intact).
        On failure, raise ModelExecutionError with a helpful snippet.
        """
        sql_rendered = self.render_sql(
            node,
            env,
            ref_resolver=lambda name: self._resolve_ref(name, env),
            source_resolver=self._resolve_source,
        )
        sql = self._strip_leading_config(sql_rendered).strip()

        materialization = (node.meta or {}).get("materialized", "table")
        if materialization == "ephemeral":
            return

        # 1) Direct DDL passthrough (CREATE [OR REPLACE] {TABLE|VIEW} …)
        if self._looks_like_direct_ddl(sql):
            try:
                self._execute_sql_direct(sql, node)
                return
            except NotImplementedError:
                # Engine doesn't implement direct DDL → fall back to normalized materialization.
                pass
            except Exception as e:
                raise ModelExecutionError(
                    node_name=node.name,
                    relation=relation_for(node.name),
                    message=str(e),
                    sql_snippet=sql,
                ) from e

        # 2) Normalized materialization path (CTE-safe body)
        body = self._selectable_body(sql).rstrip(" ;\n\t")
        target_sql = self._format_relation_for_ref(node.name)

        # Centralized SQL preview logging (applies to ALL engines)
        preview = (
            f"=== MATERIALIZE ===\n"
            f"-- model: {node.name}\n"
            f"-- materialized: {materialization}\n"
            f"-- target: {target_sql}\n"
            f"{body}\n"
        )
        echo_debug(preview)

        try:
            self._apply_sql_materialization(node, target_sql, body, materialization)
        except Exception as e:
            preview = f"-- materialized={materialization}\n-- target={target_sql}\n{body}"
            raise ModelExecutionError(
                node_name=node.name,
                relation=relation_for(node.name),
                message=str(e),
                sql_snippet=preview,
            ) from e

    # --- Helpers for materialization & ephemeral inlining (instance methods) ---
    def _first_select_body(self, sql: str) -> str:
        """
        Fallback: extract the substring starting at the first SELECT token.
        If no SELECT is found, return the original string unchanged.
        Prefer using _selectable_body() which is CTE-aware.
        """
        m = re.search(r"\bselect\b", sql, flags=re.I | re.S)
        return sql[m.start() :] if m else sql

    def _strip_leading_config(self, sql: str) -> str:
        """
        Remove a leading Jinja {{ config(...) }} so the engine receives clean SQL.
        """
        return re.sub(
            r"^\s*\{\{\s*config\s*\(.*?\)\s*\}\}\s*",
            "",
            sql,
            flags=re.I | re.S,
        )

    def _strip_leading_sql_comments(self, sql: str) -> tuple[str, int]:
        """
        Remove *only* leading SQL comments and blank lines, return (trimmed_sql, start_idx).

        Supports:
          -- single line comments
          /* block comments */
        """
        # Match chain of: whitespace, comment, whitespace, comment, ...
        # Using DOTALL so block comments spanning lines are handled.
        pat = re.compile(
            r"""^\s*(?:
                                --[^\n]*\n        # line comment
                              | /\*.*?\*/\s*      # block comment
                             )*""",
            re.VERBOSE | re.DOTALL,
        )
        m = pat.match(sql)
        start = m.end() if m else 0
        return sql[start:], start

    def _selectable_body(self, sql: str) -> str:
        """
        Return a valid SELECT-able body for CREATE … AS:

          - If the statement starts (after comments/blank lines) with a CTE (WITH …),
            return from the WITH onward.
          - If it starts with SELECT, return from SELECT onward.
          - Otherwise, fall back to the first SELECT heuristic.
        """
        # Keep original for fallback, but check after stripping comments
        s0 = sql
        s, offset = self._strip_leading_sql_comments(sql)
        s_ws = s.lstrip()  # in case comments left some spaces
        head = s_ws[:6].lower()

        if s_ws.startswith(("with ", "with\n", "with\t")):
            # Return from the start of this WITH (preserve exactly s_ws form)
            # Compute index into original string to retain original casing beyond comments
            idx = s.find(s_ws)
            return sql[offset + (idx if idx >= 0 else 0) :].lstrip()

        if head.startswith("select"):
            idx = s.find(s_ws)
            return sql[offset + (idx if idx >= 0 else 0) :].lstrip()

        # Fallback: first SELECT anywhere in the statement
        return self._first_select_body(s0)

    def _looks_like_direct_ddl(self, sql: str) -> bool:
        """
        True if the rendered SQL starts with CREATE (TABLE|VIEW) so it should be
        executed verbatim as a user-provided DDL statement.
        """
        head = sql.lstrip().lower()
        return (
            head.startswith("create table")
            or head.startswith("create view")
            or head.startswith("create or replace")
        )

    def _execute_sql_direct(self, sql: str, node: Node) -> None:
        """
        Execute a full CREATE … statement as-is. Default: use `self.con.execute(sql)`.
        Engines can override this for custom dispatch. If not available, raise
        NotImplementedError so the caller can fall back to normalized materialization.
        """
        con = getattr(self, "con", None)
        if con is None or not hasattr(con, "execute"):
            raise NotImplementedError("Direct DDL execution is not implemented for this executor.")
        con.execute(sql)

    def _render_ephemeral_sql(self, name: str, env: Environment) -> str:
        """
        Render the SQL for an 'ephemeral' model and return it as a parenthesized
        subquery. This is CTE-safe: we keep the full WITH…SELECT… statement and
        only strip the leading {{ config(...) }} and trailing semicolons.
        """
        node = REGISTRY.get_node(name) if hasattr(REGISTRY, "get_node") else REGISTRY.nodes[name]

        raw = Path(node.path).read_text(encoding="utf-8")
        tmpl = env.from_string(raw)

        sql = tmpl.render(
            ref=lambda n: self._resolve_ref(n, env),
            source=self._resolve_source,
            this=_ThisProxy(
                relation_for(node.name),
                (getattr(node, "meta", {}) or {}).get("materialized", "table"),
                getattr(self, "schema", None) or getattr(self, "dataset", None),
                getattr(self, "database", None) or getattr(self, "project", None),
            ),
        )
        # Remove a leading config block and keep the full, CTE-capable statement
        sql = self._strip_leading_config(sql).strip()
        body = self._selectable_body(sql).rstrip(" ;\n\t")
        return f"(\n{body}\n)"

    # ---------- Python models ----------
    def run_python(self, node: Node) -> None:
        func = REGISTRY.py_funcs[node.name]
        deps = REGISTRY.nodes[node.name].deps or []
        if _http_ctx is not None:
            with suppress(Exception):
                _http_ctx.reset_for_node(node.name)

        # Load inputs
        arg: Any
        if len(deps) == 0:
            arg = None
        elif len(deps) == 1:
            rel = relation_for(deps[0])
            df_in: TFrame = self._read_relation(rel, node, deps)
            arg = df_in  # TFrame
        else:
            frames: dict[str, TFrame] = {}
            for dep in deps:
                rel = relation_for(dep)
                f = self._read_relation(rel, node, deps)
                frames[rel] = f
            arg = frames  # dict[str, TFrame]

        # Validate required columns / structure (frame specific)
        requires = REGISTRY.py_requires.get(node.name, {})
        if deps:
            self._validate_required(node.name, arg, requires)

        # Execute the model
        out = func(arg)
        if not self._is_frame(out):
            raise TypeError(
                f"Python-Modell '{node.name}' muss {self._frame_name()} DataFrame zurückgeben."
            )

        # Materialize the result (table default; view supported)
        target = relation_for(node.name)
        mat = (getattr(node, "meta", {}) or {}).get("materialized", "table")
        if mat == "view":
            backing = self._py_view_backing_name(target)
            self._materialize_relation(backing, out, node)
            self._create_or_replace_view_from_table(target, backing, node)
        else:
            self._materialize_relation(target, out, node)

        if _http_ctx is not None:
            try:
                snap = _http_ctx.snapshot()
                (node.meta or {}).update({"_http_snapshot": snap})
            except Exception:
                pass

    # -------- Python model view helpers (shared) --------
    def _py_view_backing_name(self, relation: str) -> str:
        """
        Backing table name for Python models materialized as views.
        Must be a valid identifier for the target engine.
        """
        return f"__ff_py_{relation}"

    @abstractmethod
    def _create_or_replace_view_from_table(
        self, view_name: str, backing_table: str, node: Node
    ) -> None:
        """
        Create (or replace) a VIEW named `view_name` that selects from `backing_table`.
        Implement engine-specific DDL here.
        """
        ...

    # ---------- SQL hook contracts ----------
    @abstractmethod
    def _format_relation_for_ref(self, name: str) -> str:
        """
        Return the engine-specific SQL identifier used to reference a model's materialised relation.
        """
        ...

    @abstractmethod
    def _format_source_reference(
        self, cfg: dict[str, Any], source_name: str, table_name: str
    ) -> str:
        """
        Return the SQL identifier used to reference a configured source.
        """
        ...

    def _apply_sql_materialization(
        self, node: Node, target_sql: str, select_body: str, materialization: str
    ) -> None:
        """
        Materialise the rendered SELECT according to the requested kind (`table`, `view`, ...).
        The default implementation delegates to `create_or_replace_*` hooks.
        """
        if materialization == "view":
            self._create_or_replace_view(target_sql, select_body, node)
        else:
            self._create_or_replace_table(target_sql, select_body, node)

    @abstractmethod
    def _create_or_replace_view(self, target_sql: str, select_body: str, node: Node) -> None:
        """
        Engine-specific implementation for CREATE OR REPLACE VIEW ... AS <body>.
        """
        ...

    @abstractmethod
    def _create_or_replace_table(self, target_sql: str, select_body: str, node: Node) -> None:
        """
        Engine-specific implementation for CREATE OR REPLACE TABLE ... AS <body>.
        """
        ...

    # ---------- Resolution helpers ----------
    def _resolve_ref(self, name: str, env: Environment) -> str:
        dep = REGISTRY.get_node(name) if hasattr(REGISTRY, "get_node") else REGISTRY.nodes[name]
        if dep.meta.get("materialized") == "ephemeral":
            return self._render_ephemeral_sql(dep.name, env)
        return self._format_relation_for_ref(name)

    def _resolve_source(self, source_name: str, table_name: str) -> str:
        group = REGISTRY.sources.get(source_name)
        if not group:
            known = ", ".join(sorted(REGISTRY.sources.keys())) or "<none>"
            raise KeyError(f"Unknown source '{source_name}'. Known sources: {known}")

        entry = group.get(table_name)
        if not entry:
            known_tables = ", ".join(sorted(group.keys())) or "<none>"
            raise KeyError(
                f"Unknown source table '{source_name}.{table_name}'. Known tables: {known_tables}"
            )

        engine_key = self.engine_name
        try:
            cfg = resolve_source_entry(entry, engine_key, default_identifier=table_name)
        except KeyError as exc:
            raise KeyError(
                f"Source {source_name}.{table_name} missing "
                f"identifier/location for engine '{engine_key}'"
            ) from exc

        cfg = dict(cfg)
        cfg.setdefault("options", {})
        return self._format_source_reference(cfg, source_name, table_name)

    # ---------- Abstrakte Frame-Hooks ----------
    @abstractmethod
    def _read_relation(self, relation: str, node: Node, deps: Iterable[str]) -> TFrame: ...

    @abstractmethod
    def _materialize_relation(self, relation: str, df: TFrame, node: Node) -> None: ...

    def _validate_required(
        self, node_name: str, inputs: Any, requires: dict[str, set[str]]
    ) -> None:
        """
        inputs: either TFrame (single dependency) or dict[str, TFrame] (multiple dependencies)
        raises: ValueError with a clear explanation when columns/keys are missing
        """
        if not requires:
            return

        validate_required_columns(node_name, inputs, requires)

    def _columns_of(self, frame: TFrame) -> list[str]:
        """List of columns for debug logging."""
        columns = getattr(frame, "columns", None)
        if columns is not None:
            return [str(c) for c in list(columns)]
        raise NotImplementedError("_columns_of needs to be implemented for non-pandas frame types")

    def _is_frame(self, obj: Any) -> bool:
        """Is 'obj' a valid frame for this executor?"""
        return isinstance(obj, _PDDataFrame)

    def _frame_name(self) -> str:
        """Only used when formatting error messages (default)."""
        return "a"

    # ---------- Build meta hook ----------
    def on_node_built(self, node: Node, relation: str, fingerprint: str) -> None:
        """
        Hook invoked after a node has been successfully materialized.
        Engines should override this to write/update the meta table (e.g. _ff_meta).

        Default: no-op.
        """
        return

    # ── Incremental API ───────────────────────────────────────────────
    def exists_relation(self, relation: str) -> bool:  # pragma: no cover - abstract
        """Returns True if physical relation exists (table/view)."""
        raise NotImplementedError

    def create_table_as(self, relation: str, select_sql: str) -> None:  # pragma: no cover
        """CREATE TABLE AS SELECT …"""
        raise NotImplementedError

    def incremental_insert(self, relation: str, select_sql: str) -> None:  # pragma: no cover
        """INSERT-only (Append)."""
        raise NotImplementedError

    def incremental_merge(
        self, relation: str, select_sql: str, unique_key: list[str]
    ) -> None:  # pragma: no cover
        """Best-effort UPSERT; Default fallback via staging delete+insert."""
        raise NotImplementedError

    def alter_table_sync_schema(
        self, relation: str, select_sql: str, *, mode: str = "append_new_columns"
    ) -> None:  # pragma: no cover
        """
        Optional: Additive schema synchronisation. 'mode' = append_new_columns|sync_all_columns.
        Default implementation: No-Op.
        """
        return None

    ENGINE_NAME: str = "generic"

    @property
    def engine_name(self) -> str:
        return getattr(self, "ENGINE_NAME", "generic")

run_sql

run_sql(node, env)
Orchestrate SQL models:

1) Render Jinja (ref/source/this) and strip the leading {{ config(...) }}. 2) If the SQL is full DDL (CREATE …), execute it verbatim (passthrough). 3) Otherwise, normalize to CREATE OR REPLACE {TABLE|VIEW} AS &lt;body&gt;. The body is CTE-aware (keeps WITH … SELECT … intact).

On failure, raise ModelExecutionError with a helpful snippet.

Source code in src/fastflowtransform/executors/base.py
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
def run_sql(self, node: Node, env: Environment) -> None:
    """
    Orchestrate SQL models:
      1) Render Jinja (ref/source/this) and strip leading {{ config(...) }}.
      2) If the SQL is full DDL (CREATE …), execute it verbatim (passthrough).
      3) Otherwise, normalize to CREATE OR REPLACE {TABLE|VIEW} AS <body>.
         The body is CTE-aware (keeps WITH … SELECT … intact).
    On failure, raise ModelExecutionError with a helpful snippet.
    """
    sql_rendered = self.render_sql(
        node,
        env,
        ref_resolver=lambda name: self._resolve_ref(name, env),
        source_resolver=self._resolve_source,
    )
    sql = self._strip_leading_config(sql_rendered).strip()

    materialization = (node.meta or {}).get("materialized", "table")
    # Ephemeral models are inlined into consumers at ref() time; nothing to build.
    if materialization == "ephemeral":
        return

    # 1) Direct DDL passthrough (CREATE [OR REPLACE] {TABLE|VIEW} …)
    if self._looks_like_direct_ddl(sql):
        try:
            self._execute_sql_direct(sql, node)
            return
        except NotImplementedError:
            # Engine doesn't implement direct DDL → fall back to normalized materialization.
            pass
        except Exception as e:
            raise ModelExecutionError(
                node_name=node.name,
                relation=relation_for(node.name),
                message=str(e),
                sql_snippet=sql,
            ) from e

    # 2) Normalized materialization path (CTE-safe body)
    body = self._selectable_body(sql).rstrip(" ;\n\t")
    target_sql = self._format_relation_for_ref(node.name)

    # Centralized SQL preview logging (applies to ALL engines)
    preview = (
        f"=== MATERIALIZE ===\n"
        f"-- model: {node.name}\n"
        f"-- materialized: {materialization}\n"
        f"-- target: {target_sql}\n"
        f"{body}\n"
    )
    echo_debug(preview)

    try:
        self._apply_sql_materialization(node, target_sql, body, materialization)
    except Exception as e:
        # Rebuild a compact preview for the error context (without the banner).
        preview = f"-- materialized={materialization}\n-- target={target_sql}\n{body}"
        raise ModelExecutionError(
            node_name=node.name,
            relation=relation_for(node.name),
            message=str(e),
            sql_snippet=preview,
        ) from e

on_node_built

on_node_built(node, relation, fingerprint)

Hook invoked after a node has been successfully materialized. Engines should override this to write/update the meta table (e.g. _ff_meta).

Default: no-op.

Source code in src/fastflowtransform/executors/base.py
526
527
528
529
530
531
532
533
def on_node_built(self, node: Node, relation: str, fingerprint: str) -> None:
    """
    Hook invoked after a node has been successfully materialized.
    Engines should override this to write/update the meta table (e.g. _ff_meta).

    Args:
        node: Graph node that was just built.
        relation: Physical relation name that was materialized.
        fingerprint: Build fingerprint recorded for change detection.

    Default: no-op.
    """
    return

exists_relation

exists_relation(relation)

Returns True if physical relation exists (table/view).

Source code in src/fastflowtransform/executors/base.py
536
537
538
def exists_relation(self, relation: str) -> bool:  # pragma: no cover - abstract
    """
    Returns True if physical relation exists (table/view).

    Part of the incremental API; engines must override this before
    incremental materializations can be used.
    """
    raise NotImplementedError

create_table_as

create_table_as(relation, select_sql)

CREATE TABLE AS SELECT …

Source code in src/fastflowtransform/executors/base.py
540
541
542
def create_table_as(self, relation: str, select_sql: str) -> None:  # pragma: no cover
    """
    CREATE TABLE AS SELECT …

    Incremental-API hook: creates `relation` from `select_sql` on the first
    (full-refresh) build. Engines must override.
    """
    raise NotImplementedError

incremental_insert

incremental_insert(relation, select_sql)

INSERT-only (Append).

Source code in src/fastflowtransform/executors/base.py
544
545
546
def incremental_insert(self, relation: str, select_sql: str) -> None:  # pragma: no cover
    """
    INSERT-only (Append).

    Incremental-API hook: appends the rows produced by `select_sql` to an
    existing `relation` without deduplication. Engines must override.
    """
    raise NotImplementedError

incremental_merge

incremental_merge(relation, select_sql, unique_key)

Best-effort UPSERT; Default fallback via staging delete+insert.

Source code in src/fastflowtransform/executors/base.py
548
549
550
551
552
def incremental_merge(
    self, relation: str, select_sql: str, unique_key: list[str]
) -> None:  # pragma: no cover
    """
    Best-effort UPSERT; Default fallback via staging delete+insert.

    `unique_key` lists the columns identifying a row for the merge.
    Engines must override.
    """
    raise NotImplementedError

alter_table_sync_schema

alter_table_sync_schema(relation, select_sql, *, mode='append_new_columns')

Optional: Additive schema synchronisation. 'mode' = append_new_columns|sync_all_columns. Default implementation: No-Op.

Source code in src/fastflowtransform/executors/base.py
554
555
556
557
558
559
560
561
def alter_table_sync_schema(
    self, relation: str, select_sql: str, *, mode: str = "append_new_columns"
) -> None:  # pragma: no cover
    """
    Optional: Additive schema synchronisation. 'mode' = append_new_columns|sync_all_columns.
    Default implementation: No-Op.

    Engines that support schema evolution override this to reconcile the
    existing table's columns with those produced by `select_sql`.
    """
    return None