feature request: make edgedb-py able to generate input models for insert/update queries
As of now, edgedb-py generates a query function and a response model for a given query, but it does not structure the input parameters in any way. This makes sense for most queries, but leaves something to be desired for inserts and updates. It feels tedious to write a pydantic model containing all the fields that EdgeDB already knows about for each such query, and having to keep that model in sync with the query by hand whenever the parameters change adds maintenance cost.
I therefore propose that edgedb-py should be able to generate a pydantic input model for insert and update queries. This would make it a perfect fit for FastAPI! The feature could either be the new default or be enabled via a flag, depending on what makes more sense.
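For illustration, here is a minimal sketch of what that could look like. The query name, parameters, and the `InsertUserInput` class below are hypothetical, not actual edgedb-py output; the current signature is only approximated in the comment.

```python
# Hypothetical example: suppose insert_user.edgeql takes $name and $email.
# Today, codegen produces (roughly) a function like
#     async def insert_user(executor, *, name: str, email: str) -> InsertUserResult
# The proposal is for codegen to also emit a matching input model, e.g.:
import pydantic


class InsertUserInput(pydantic.BaseModel):
    name: str
    email: str
```

A FastAPI endpoint could then accept `InsertUserInput` directly as the request body and splat its fields into the generated query function.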
Actually, now that I have read #358, maybe it would make sense to generate them for every kind of query? That would probably also be less complex to implement.
I'm really late to this, but I have a script I run after codegen to do exactly this. Perhaps it would be helpful here.
```python
#!/usr/bin/env python3
"""Generate pydantic input models from the functions emitted by edgedb-py codegen."""
from collections.abc import Sequence
from pathlib import Path

from myapplication.q import async_ as module  # CHANGE ME: the codegen-generated module

file = "models.py"  # CHANGE ME: where to write the generated models
indent = "    "


def _to_camel(s: str) -> str:
    """Convert a snake_case query name to CamelCase for the class name."""
    set_cap = False
    result = s[0].capitalize()
    for char in s[1:]:
        if char == "_":
            set_cap = True
            continue
        if set_cap:
            result += char.capitalize()
            set_cap = False
            continue
        result += char
    return result


def gather_inputs(
    fn, additional_ignores: Sequence[str] | None = None
) -> dict[str, str]:
    """Collect a function's annotated parameters, minus the ignored names."""
    ignores = ["return"]
    if additional_ignores:
        ignores += additional_ignores
    ants = fn.__annotations__
    return {
        name: repr(hint).strip("'")
        for name, hint in ants.items()
        if name not in ignores
    }


def gather_imports(filtered_annotations: dict[str, str]) -> set[str]:
    """Return the annotated types that are not builtins and therefore need an import."""
    defaults = set("bool bytes int float str set dict list".split())
    return {i for i in filtered_annotations.values() if i not in defaults}


def gather_functions(module) -> list[str]:
    """Find the generated query functions, skipping classes (capitalized names)."""
    functions = []
    for item in dir(module):
        if not callable(getattr(module, item)):
            continue
        if item[0].isupper():
            continue
        functions.append(item)
    return functions


def generate_class(name: str, inputs: dict[str, str]) -> str:
    """Render a pydantic model whose fields mirror the query parameters."""
    # buffer = f"@dataclasses.dataclass\nclass {_to_camel(name)}:\n"  # dataclass variant
    assert inputs, f"Function {name} has no inputs."
    buffer = f"class {_to_camel(name)}(pydantic.BaseModel):\n"
    for field, hint in inputs.items():
        buffer += f"{indent}{field}: {hint}\n"
    buffer += "\n"
    return buffer


def generate_imports(imports: set[str]) -> str:
    return "\n".join(sorted(f'import {i.split(".")[0]}' for i in imports))


def main():
    # Seed with pydantic so the output file imports it; use "dataclasses" instead
    # if you switch generate_class back to the dataclass variant.
    imports = {"pydantic"}
    output = ""
    for fn in gather_functions(module):
        # "executor" is the client/transaction argument, not a query parameter.
        inputs = gather_inputs(getattr(module, fn), ["executor"])
        if not inputs:
            continue
        imports = imports | gather_imports(inputs)
        output += generate_class(fn, inputs)
    with Path(file).open("w") as f:
        f.write(generate_imports(imports))
        f.write("\n\n\n")
        f.write(output)


if __name__ == "__main__":
    print("Generating input models.")
    main()
```
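For completeness, here is roughly how the generated models end up being used with FastAPI in my setup. The module paths and the `insert_user` query are placeholders from my application, not anything edgedb-py ships:

```python
# Hypothetical usage sketch; module paths and query names are placeholders.
import edgedb
from fastapi import FastAPI

from myapplication import models               # written by the script above
from myapplication.q import async_ as queries  # edgedb-py generated module

app = FastAPI()
client = edgedb.create_async_client()


@app.post("/users")
async def create_user(payload: models.InsertUser):
    # The model fields mirror the query parameters, so they can be passed
    # straight through to the generated query function.
    return await queries.insert_user(client, **payload.dict())  # .model_dump() on pydantic v2
```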