I’m a retired programmer. I’ve used many languages throughout my career, but most of the time I was a C/C++ coder. I am creating an interactive tool using Google Gemini’s A.I. By this I mean going beyond a simple Alexa/Siri question/answer system to actually keeping an open dialogue going with Gemini. I have a client system working great written in pure Ada and taking advantage of the real-time kernel I’m using on LinuxCNC. The server part — I call it a relay — is written in Python because I cannot get a pure Ada server working on the Pi: certain libraries are missing or don’t work. These libraries are: GNAT Components Collection (GNATcoll) and GNATCOLL.HTTP.Clients. I even tried AWS, but that was a disaster. I’ll include the Python code, which works really well. If anyone can point me in the right direction, it would be greatly appreciated.
```python
#!/home/frank/mirror_brain/venv/bin/python3
"""Relay server: bridges a local Ada audio client to Google Gemini's
bidirectional (live) WebSocket API and drives a MagicMirror trigger file."""
import asyncio
import websockets
import json
import base64
import os
from pathlib import Path

# --- Configuration ---
MODEL = "models/gemini-2.5-flash-native-audio-latest"
# Read the key from the environment; the literal placeholder is kept only as
# a fallback so the script still starts. Never commit a real key to source.
API_KEY = os.environ.get("GEMINI_API_KEY", "My Key")
URI = f"wss://generativelanguage.googleapis.com/ws/google.ai.generativelanguage.v1alpha.GenerativeService.BidiGenerateContent?key={API_KEY}"

# Path to the Magic Mirror trigger
TRIGGER_FILE = Path("/home/frank/MagicMirror/modules/trigger.json")
def update_trigger(status):
    """Explicitly writes JSON status to the trigger file.

    status: one of "idle", "off", or "talking"; consumed by the
    MagicMirror module that watches TRIGGER_FILE.
    """
    try:
        with open(TRIGGER_FILE, 'w') as f:
            json.dump({"status": status}, f)
    except Exception as e:
        # Best-effort: the relay keeps running even if the trigger
        # file is unwritable.
        print(f"[ERROR] Failed to write trigger: {e}")


async def bridge():
    """Relay loop: connect to Gemini, then serve the Ada client on
    127.0.0.1:8081, shuttling 16 kHz PCM up and audio replies back."""
    # SET DEFAULT STATE: Webcam on, Professor hidden
    update_trigger("idle")
    print(f"Connecting to Gemini ({MODEL})...")
    try:
        async with websockets.connect(
            URI, ping_interval=None, ping_timeout=None
        ) as google_ws:
            # --- Step 1: Load Context ---
            context_path = '/home/frank/mirror_brain/professor_context.json'
            try:
                with open(context_path, 'r') as f:
                    # Loaded as a sanity check; not yet injected into the prompt.
                    memories = json.load(f)
                instr_text = (
                    "You are the Professor. Speak with a deep, resonant baritone voice. "
                    "While your voice is low-pitched, your speech must be articulate, "
                    "crisp, and intellectually alert. Avoid a slow drawl or dragging "
                    "your words; maintain a steady, professorial pace that reflects "
                    "clear thinking and authority."
                    "Speak with the cadence of a lively lecture"
                )
                print(f"[RELAY] Academic Context loaded.")
            except Exception as e:
                # Fall back to a minimal persona if the context file is missing.
                instr_text = "You are 'The Professor', a distinguished Oxford academic."

            # --- Step 2: Gemini Handshake ---
            setup_msg = {
                "setup": {
                    "model": MODEL,
                    "generationConfig": {
                        "responseModalities": ["AUDIO"],
                        "speechConfig": {
                            "voiceConfig": {
                                "prebuiltVoiceConfig": {"voiceName": "Charon"}
                            }
                        }
                    },
                    "systemInstruction": {"parts": [{"text": instr_text}]}
                }
            }
            await google_ws.send(json.dumps(setup_msg))
            await google_ws.recv()  # wait for setup acknowledgement before streaming
            print("Gemini Handshake Complete. The Professor is in his study.")

            async def handle_ada(reader, writer):
                """Per-connection handler for the Ada client socket."""
                print("\n[RELAY] Ada Linked. Waiting for Professor to speak...")
                update_trigger("off")  # make sure the Professor is initially off.

                async def google_to_ada_loop():
                    """Downlink: forward Gemini audio/text parts to the Ada client."""
                    try:
                        async for message in google_ws:
                            msg = json.loads(message)
                            if "serverContent" in msg:
                                server_content = msg["serverContent"]
                                model_turn = server_content.get("modelTurn", {})
                                for part in model_turn.get("parts", []):
                                    if "text" in part:
                                        print(f"\033[96m[PROFESSOR]: {part['text']}\033[0m")
                                    if "inlineData" in part:
                                        # PROFESSOR IS TALKING: Show face, hide webcam
                                        update_trigger("talking")
                                        audio_bytes = base64.b64decode(part["inlineData"]["data"])
                                        writer.write(audio_bytes)
                                        await writer.drain()
                                # Check if the specific turn/response is finished
                                if server_content.get("turnComplete"):
                                    # TURN COMPLETE: Hide face, show webcam
                                    update_trigger("idle")
                                    print("[RELAY] Turn Complete. Mouth closed.")
                    except Exception as e:
                        if "BrokenPipeError" not in str(e):
                            print(f"Downlink Error: {e}")
                        update_trigger("idle")

                receiver_task = asyncio.create_task(google_to_ada_loop())
                try:
                    # Uplink: fixed 1536-byte chunks of 16 kHz PCM from Ada to Gemini.
                    while True:
                        try:
                            data = await asyncio.wait_for(
                                reader.readexactly(1536), timeout=5.0
                            )
                            if not data:
                                break
                            payload = {
                                "realtimeInput": {
                                    "mediaChunks": [{
                                        "data": base64.b64encode(data).decode('utf-8'),
                                        "mimeType": "audio/pcm;rate=16000"
                                    }]
                                }
                            }
                            await google_ws.send(json.dumps(payload))
                        except asyncio.IncompleteReadError:
                            break  # client closed mid-chunk
                        except asyncio.TimeoutError:
                            continue  # no audio for 5 s; keep the link alive
                except Exception as e:
                    print(f"Uplink Error: {e}")
                finally:
                    print("[RELAY] The Professor has stepped out.")
                    update_trigger("idle")
                    receiver_task.cancel()
                    writer.close()
                    await writer.wait_closed()

            server = await asyncio.start_server(handle_ada, '127.0.0.1', 8081)
            async with server:
                await server.serve_forever()
    except Exception as e:
        print(f"Global Error: {e}")
        update_trigger("idle")


if __name__ == "__main__":
    async def main():
        await bridge()

    try:
        asyncio.run(main())
    except KeyboardInterrupt:
        update_trigger("idle")
Frank