Switch from Google Sheets to Grist; Redis now subscribes to [GET_CATALOG_CHANNEL, UPDATE_MENU_CHANNEL, ADD_CATALOG_CHANNEL]

This commit is contained in:
Ittipat Lusuk 2026-04-28 11:44:19 +07:00
parent 6201dae51d
commit 15468b9d18

641
main.py
View file

@ -31,6 +31,10 @@ DELETE_MENU_CHANNEL = f"{SERVICE_NAME}/delete/menu"
ADD_CATALOG_CHANNEL = f"{SERVICE_NAME}/add/catalog"
ADD_MENU_CHANNEL = f"{SERVICE_NAME}/add/menu"
# Grist set up ...
GRIST_URL = os.getenv("GRIST_URL")
GRIST_API_KEY = os.getenv("GRIST_API_KEY")
r = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True)
ID_PATTERN = r'[a-zA-Z0-9]{2}-[a-zA-Z0-9]{2}-[a-zA-Z0-9]{2}-[a-zA-Z0-9]{4}'
@ -42,6 +46,11 @@ COUNTRY_MAPPING = {
os.getenv("SHEET_NEW_LAYOUT_THA"),
os.getenv("SHEET_NEW_LAYOUT_V2_THA"),
os.getenv("SHEET_NAME_DESC_V2_THA")
],
"grist_doc_id": [
os.getenv("DOC_ID_NEW_LAYOUT_THA"),
os.getenv("DOC_ID_NEW_LAYOUT_V2_THA"),
os.getenv("DOC_ID_NAME_DESC_V2_THA")
]
},
"tha_premium": {
@ -226,7 +235,7 @@ def redis_message_handler():
elif channel == GET_CATALOG_CHANNEL:
try:
if not FRONTEND_NOTIFY_URL:
print("Warning: FRONTEND_NOTIFY_URL is not set.")
print(f"{SERVICE_NAME} Warning: FRONTEND_NOTIFY_URL is not set.")
continue
result = get_catalogs(country)
@ -284,7 +293,7 @@ def redis_message_handler():
continue
try:
update_sheets(country, content)
update_sheets(country, catalog, content)
print(f"[{SERVICE_NAME}] Update success: {catalog} | User: {user_id}")
except Exception as e:
print(f"[{SERVICE_NAME}] Update error: {e}")
@ -307,6 +316,31 @@ def redis_message_handler():
print(f"[Redis Error] Error processing message: {e}")
traceback.print_exc()
def find_grist_table_id(doc_id, catalog_suffix):
    """Return the id of the Grist table in *doc_id* whose name contains
    ``file=<catalog_suffix>``, or None when not found or on any error.

    Matches against both the reconstructed table id (Grist sanitizes ids,
    e.g. ``Name_x_file_y_skt``) and the human-readable label, which keeps
    the original punctuation.
    """
    url = f"{GRIST_URL.rstrip('/')}/api/docs/{doc_id}/tables"
    headers = {"Authorization": f"Bearer {GRIST_API_KEY}"}
    try:
        # Timeout added so a stalled Grist server cannot hang the caller forever.
        resp = requests.get(url, headers=headers, timeout=15)
        if resp.status_code != 200:
            return None
        needle = f"file={catalog_suffix}"
        for t in resp.json().get("tables", []):
            t_id = t.get("id", "")
            # Undo the id sanitization before comparing.
            if needle in reconstruct_table_name(t_id):
                return t_id
            # Fall back to the display label.
            if needle in t.get("fields", {}).get("label", ""):
                return t_id
        return None
    except Exception as e:
        print(f"Error: {e}")
        return None
def send_stream_notification(msg_type: str, content: any, batch_id: str, current_chunk: int, total_chunks: int, user_id: str):
"""
msg_type: "start", "chunk", "end", "error"
@ -338,43 +372,29 @@ def send_stream_notification(msg_type: str, content: any, batch_id: str, current
def process_and_stream_sheet_data(country: str, catalog: str, user_id: str):
batch_id = str(uuid.uuid4())
send_stream_notification("start", {"message": f"Start fetching catalog: {catalog}"}, batch_id, 0, 0, user_id)
try:
config = COUNTRY_MAPPING.get(country)
client = get_gspread_client()
spreadsheet = client.open_by_key(config["spreadsheet_id"])
grist_docs = config.get("grist_doc_id", [])
try:
nl_name = next(s for s in config["sheets"] if "new-layout" in s.lower() and "v2" not in s.lower())
nv2_name = next(s for s in config["sheets"] if "new-layout-v2" in s.lower())
nd_name = next(s for s in config["sheets"] if "name-desc-v2" in s.lower())
except StopIteration:
raise HTTPException(status_code=404, detail="Sheet mapping configuration error")
doc_nl = grist_docs[0]
doc_nv2 = grist_docs[1]
doc_nd = grist_docs[2]
nl_all = spreadsheet.worksheet(nl_name).get_all_values()
nv2_all = spreadsheet.worksheet(nv2_name).get_all_values()
nd_all = spreadsheet.worksheet(nd_name).get_all_values()
full_table_nl = find_grist_table_id(doc_nl, catalog)
full_table_nv2 = find_grist_table_id(doc_nv2, catalog)
# range for new-layout-v2
nv2_start, nv2_end = find_catalog_range(nv2_all, catalog)
if nv2_start is None:
nv2_start, nv2_end = 0, len(nv2_all) - 1
if not full_table_nl:
raise Exception(f"Table for catalog {catalog} not found in New-Layout")
nl_data = fetch_grist_table_data(doc_nl, full_table_nl)
nv2_data = fetch_grist_table_data(doc_nv2, full_table_nv2)
nd_all = fetch_grist_table_data(doc_nd, "Name_desc_v2")
final_result = []
is_in_block = False
for i, nl_row in enumerate(nl_all):
col_a = nl_row[0].strip() if len(nl_row) > 0 else ""
if col_a in ["IGNORE", "-"]: col_a = ""
if not is_in_block:
if catalog in col_a: is_in_block = True
continue
else:
if col_a and catalog not in col_a: break
for i, nl_row in enumerate(nl_data):
name_th = nl_row[2].strip() if len(nl_row) > 2 else ""
name_en = nl_row[3].strip() if len(nl_row) > 3 else ""
@ -387,8 +407,7 @@ def process_and_stream_sheet_data(country: str, catalog: str, user_id: str):
"name_desc_v2": []
}
# Search Targets (G-K, H-L, I-M)
# G(6)-K(10), H(7)-L(11), I(8)-M(12)
# Search Targets
pairs = [
(nl_row[6] if len(nl_row) > 6 else "-", nl_row[10] if len(nl_row) > 10 else "-"),
(nl_row[7] if len(nl_row) > 7 else "-", nl_row[11] if len(nl_row) > 11 else "-"),
@ -400,40 +419,27 @@ def process_and_stream_sheet_data(country: str, catalog: str, user_id: str):
for p1, p2 in pairs:
c1 = p1.strip() if p1.strip() and p1.strip() != "-" else "-"
c2 = p2.strip() if p2.strip() and p2.strip() != "-" else "-"
# Format: {code1},{code2}
if c1 != "-" or c2 != "-":
search_targets.append(f"{c1},{c2}")
# keep to find in name-desc-v2
if c1 != "-" or c2 != "-": search_targets.append(f"{c1},{c2}")
if c1 != "-": individual_codes.append(c1)
if c2 != "-": individual_codes.append(c2)
# find in new-layout-v2 (check I, J, K | Index 8, 9, 10)
for j in range(nv2_start, nv2_end + 1):
nv2_row = nv2_all[j]
if len(nv2_row) > 10: # range max is column K
# I(8), J(9), K(10)
# find in nv2_data
for j, nv2_row in enumerate(nv2_data):
if len(nv2_row) > 10:
v2_vals = [nv2_row[8].strip(), nv2_row[9].strip(), nv2_row[10].strip()]
# Check target have or not in I, J, K
match_v2 = any(t in v2_vals for t in search_targets)
if match_v2:
if any(t in v2_vals for t in search_targets):
for sub_idx in range(3): # 3 row (Name, Desc, Img)
curr_j = j + sub_idx
if curr_j < len(nv2_all):
row_data = nv2_all[curr_j]
row_info = {
"row_index": curr_j + 1,
"cells": []
}
# E-H (index 4-7) with only 2 row
if curr_j < len(nv2_data):
row_data = nv2_data[curr_j]
row_info = {"row_index": curr_j + 1, "cells": []}
if sub_idx < 2:
for c_idx in range(4, 8):
if c_idx < len(row_data):
row_info["cells"].append({
"value": row_data[c_idx],
"value": row_data[c_idx],
"coord": get_coord(curr_j, c_idx)
})
menu_item["new_layout_v2"].append(row_info)
@ -454,18 +460,16 @@ def process_and_stream_sheet_data(country: str, catalog: str, user_id: str):
menu_item["name_desc_v2"].append(nd_info)
final_result.append(menu_item)
# -----------------------------------------------------------------
# init chunk
# --- Stream Chunk ---
CHUNK_SIZE = 10
total_items = len(final_result)
total_chunks = math.ceil(total_items / CHUNK_SIZE)
total_chunks = math.ceil(total_items / CHUNK_SIZE) if total_items > 0 else 1
for i in range(total_chunks):
start_idx = i * CHUNK_SIZE
end_idx = start_idx + CHUNK_SIZE
chunk_data = final_result[start_idx:end_idx]
send_stream_notification("chunk", chunk_data, batch_id, i + 1, total_chunks, user_id)
send_stream_notification("end", {"message": "All data sent successfully"}, batch_id, total_chunks, total_chunks, user_id)
@ -477,57 +481,43 @@ def process_and_stream_sheet_data(country: str, catalog: str, user_id: str):
def get_coord(r, c):
    """Convert 0-based (row, col) indices into a 1-based coordinate dict."""
    return dict(row=r + 1, col=c + 1)
def find_catalog_range(all_rows, catalog: str):
    """Locate the contiguous block of rows belonging to *catalog*.

    The block starts at the first row whose column A contains *catalog* and
    runs until another non-empty column-A value without *catalog* appears.
    Returns (start_index, end_index), or (None, None) if never found.
    """
    start = end = None
    for idx, row in enumerate(all_rows):
        first_cell = row[0].strip() if row else ""
        if start is None:
            # Still searching for the opening row of the block.
            if catalog in first_cell:
                start = end = idx
            continue
        # Inside the block: a different non-empty marker ends it.
        if first_cell and catalog not in first_cell:
            break
        end = idx
    return start, end
# get catalog
def get_catalogs(country: str):
config = COUNTRY_MAPPING.get(country)
if not config or not config["sheets"]:
print(f"[{SERVICE_NAME}] Country or sheets not found")
if not config or not config.get("grist_doc_id"):
print(f"[{SERVICE_NAME}] Country or Grist doc not found")
raise HTTPException(status_code=404, detail="Grist config not found")
try:
# First sheet in Map
target_sheet_name = config["sheets"][0]
client = get_gspread_client()
spreadsheet = client.open_by_key(config["spreadsheet_id"])
worksheet = spreadsheet.worksheet(target_sheet_name)
doc_nl_id = config["grist_doc_id"][0]
# Get A column
col_a = worksheet.col_values(1)
url = f"{GRIST_URL.rstrip('/')}/api/docs/{doc_nl_id}/tables"
headers = {"Authorization": f"Bearer {GRIST_API_KEY}"}
resp = requests.get(url, headers=headers)
catalogs = []
# Skip Row 1 (Index 0 in List)
for row_idx in range(1, len(col_a)):
val = col_a[row_idx].strip()
if not val or val in ["-", "IGNORE"]:
continue
# Specify file=...
# format: Name=Test,file=page_catalog_group_recommend.skt
match = re.search(r'file=([^,]+)', val)
if match:
catalog_name = match.group(1).strip()
lock_info = lock_manager.get_lock_info(country, catalog_name)
if resp.status_code == 200:
tables = resp.json().get("tables", [])
for i, t in enumerate(tables):
t_id = t["id"]
if t_id.startswith("Grist") or t_id.lower() == "name_desc_v2":
continue
reconstructed_name = reconstruct_table_name(t_id)
match = re.search(r'file=([^,]+)', reconstructed_name)
if match:
clean_catalog = match.group(1).strip()
else:
clean_catalog = t_id
lock_info = lock_manager.get_lock_info(country, clean_catalog)
catalogs.append({
"catalog": catalog_name,
"row_index": row_idx + 1, # Index in Google Sheet
"catalog": clean_catalog,
"row_index": i,
"status": "locked" if lock_info["is_locked"] else "free",
"locked_by": lock_info["locked_by"]
})
@ -607,31 +597,35 @@ ADD_CATALOG_CHANNEL
def handle_add_catalog(country: str, catalog_name: str, catalog: str):
config = COUNTRY_MAPPING.get(country)
client = get_gspread_client()
sheet = client.open_by_key(config["spreadsheet_id"])
grist_docs = config.get("grist_doc_id", [])
doc_nl = grist_docs[0]
doc_nv2 = grist_docs[1]
nl_name = next(s for s in config["sheets"] if "new-layout" in s.lower() and "v2" not in s.lower())
ws = sheet.worksheet(nl_name)
table_label = f"Name={catalog_name},file={catalog}"
table_id = re.sub(r'[^a-zA-Z0-9_]', '_', table_label)
table_id = table_id[0].upper() + table_id[1:]
all_values = ws.get_all_values()
headers = {"Authorization": f"Bearer {GRIST_API_KEY}", "Content-Type": "application/json"}
def create_table_in_doc(doc_id):
url = f"{GRIST_URL.rstrip('/')}/api/docs/{doc_id}/tables"
columns = [{"id": chr(65+i), "fields": {"label": chr(65+i)}} for i in range(15)]
payload = {
"tables": [{
"id": table_id,
"columns": columns
}]
}
resp = requests.post(url, headers=headers, json=payload)
if resp.status_code == 200:
print(f"[{SERVICE_NAME}] Created table {table_id} in doc {doc_id}")
else:
print(f"[{SERVICE_NAME}] Failed to create table: {resp.text}")
if any(catalog in cell for row in all_values for cell in row):
print(f"[{SERVICE_NAME}] catalog is already exists")
return
# find final row
last_row = 0
for i, row in enumerate(all_values, start=1):
if any(cell.strip() for cell in row):
last_row = i
insert_row = last_row + 2
value = f"Name={catalog_name},file={catalog}"
ws.update(f"A{insert_row}", [[value]])
print(f"[{SERVICE_NAME}] added at row {insert_row}")
create_table_in_doc(doc_nl)
create_table_in_doc(doc_nv2)
"""
@ -639,83 +633,62 @@ UPDATE_MENU_CHANNEL
"""
# update sheet
def update_sheets(country: str, content: list):
def update_sheets(country: str, catalog: str, content: list):
config = COUNTRY_MAPPING.get(country)
client = get_gspread_client()
spreadsheet = client.open_by_key(config["spreadsheet_id"])
requests_map = build_batch_requests(content)
for sheet_name in config["sheets"]:
sheet_key = None
if "new-layout-v2" in sheet_name.lower():
sheet_key = "new_layout_v2"
elif "name-desc-v2" in sheet_name.lower():
sheet_key = "name_desc_v2"
else:
sheet_key = "new_layout"
batch_data = requests_map.get(sheet_key, [])
if not batch_data:
continue
worksheet = spreadsheet.worksheet(sheet_name)
worksheet.batch_update(batch_data, value_input_option="USER_ENTERED")
# payload -> batch requests
def build_batch_requests(content):
requests_map = {
"new_layout": {},
"new_layout_v2": {},
"name_desc_v2": {}
grist_docs = config.get("grist_doc_id", [])
doc_map = {
"new_layout": grist_docs[0],
"new_layout_v2": grist_docs[1],
"name_desc_v2": grist_docs[2]
}
for item in content:
for sheet_key in requests_map.keys():
rows = item.get(sheet_key, [])
full_table_name = find_grist_table_id(grist_docs[0], catalog)
if not full_table_name:
raise Exception(f"Table for catalog {catalog} not found in New-Layout")
headers = {"Authorization": f"Bearer {GRIST_API_KEY}", "Content-Type": "application/json"}
# {"id": row_index, "fields": {"A": val, "B": val}}
for item in content:
for sheet_key in ["new_layout", "new_layout_v2", "name_desc_v2"]:
rows = item.get(sheet_key, [])
if sheet_key == "new_layout":
rows = [rows] if rows else []
if not rows:
continue
doc_id = doc_map[sheet_key]
target_table = "Name_desc_v2" if sheet_key == "name_desc_v2" else full_table_name
records_to_update = []
for row in rows:
cells = row.get("cells", [])
if not cells:
continue
if not cells: continue
row_i = cells[0]["coord"]["row"]
if row_i not in requests_map[sheet_key]:
requests_map[sheet_key][row_i] = {}
fields = {}
for cell in cells:
col_i = cell["coord"]["col"]
val = cell["value"]
col_letter = col_to_letter(cell["coord"]["col"]) # column A, B, C...
fields[col_letter] = str(cell["value"])
requests_map[sheet_key][row_i][col_i] = val
# convert → batch format
final_map = {k: [] for k in requests_map}
for sheet_key, rows in requests_map.items():
for row_i, cols in rows.items():
sorted_cols = sorted(cols.items())
chunks = split_into_chunks(sorted_cols)
for chunk in chunks:
start_col = chunk[0][0]
end_col = chunk[-1][0]
values = [v for _, v in chunk]
final_map[sheet_key].append({
"range": f"{col_to_letter(start_col)}{row_i}:{col_to_letter(end_col)}{row_i}",
"values": [values]
records_to_update.append({
"id": row_i,
"fields": fields
})
return final_map
if records_to_update:
update_url = f"{GRIST_URL.rstrip('/')}/api/docs/{doc_id}/tables/{target_table}/records"
resp = requests.patch(update_url, headers=headers, json={"records": records_to_update})
if resp.status_code != 200:
print(f"[{SERVICE_NAME}] Grist update failed for {target_table}: {resp.text}")
# column to A1 spreadsheet format
def col_to_letter(col: int) -> str:
@ -725,20 +698,6 @@ def col_to_letter(col: int) -> str:
result = chr(65 + rem) + result
return result
def split_into_chunks(sorted_cols):
    """Group (col_index, value) pairs into runs of consecutive columns.

    *sorted_cols* must be sorted ascending by column index. Returns a list
    of chunks, each chunk being a list of adjacent (col, value) pairs.
    Fix: the original indexed ``sorted_cols[0]`` unconditionally and raised
    IndexError on an empty input; now returns [] instead.
    """
    if not sorted_cols:
        return []
    chunks = []
    current = [sorted_cols[0]]
    for prev, curr in zip(sorted_cols, sorted_cols[1:]):
        if curr[0] == prev[0] + 1:
            # Column is adjacent to the previous one: extend the run.
            current.append(curr)
        else:
            # Gap found: close the current run and start a new one.
            chunks.append(current)
            current = [curr]
    chunks.append(current)
    return chunks
"""
DELETE_MENU_CHANNEL
@ -946,4 +905,292 @@ def exit_room(req: RoomRequest):
if lock_manager.release(country, req.catalog, req.user_id):
return {"status": "success", "message": "Successfully exited"}
else:
return {"status": "success", "message": "You no longer have permission"}
return {"status": "success", "message": "You no longer have permission"}
@app.get("/grist/pull/sheet/{country}")
def grist_pull_sheet_api(country: str):
    """Sync Google Sheets data into Grist for *country*.

    Returns a small status payload on success. On failure the traceback is
    logged and a 500 is raised with the error detail.
    """
    try:
        sync_sheets_to_grist(country)
        # The original returned null on success; an explicit payload gives
        # frontend callers something to check while keeping the 200 status.
        return {"status": "success", "country": country}
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/grist/push/data/sheet/{country}")
def grist_push_data_sheet_api(country: str):
    """Push Grist data back into the Google Sheets for *country*.

    Returns a small status payload on success. On failure the traceback is
    logged and a 500 is raised with the error detail.
    """
    try:
        sync_grist_to_sheets(country)
        # The original returned null on success; an explicit payload gives
        # frontend callers something to check while keeping the 200 status.
        return {"status": "success", "country": country}
    except Exception as e:
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=str(e))
"""
Grist function endpoint
[ Grist sync sheet to grist ]
"""
def get_column_letter(n):
    """Map a 0-based column index to its spreadsheet letter name.

    0 -> A, 1 -> B, 25 -> Z, 26 -> AA (bijective base-26).
    """
    letters = []
    quotient = n
    while True:
        quotient, remainder = divmod(quotient, 26)
        letters.append(chr(ord("A") + remainder))
        if quotient == 0:
            break
        # Bijective base-26 has no zero digit, so borrow one before recursing.
        quotient -= 1
    return "".join(reversed(letters))
def sync_sheets_to_grist(country_key):
    """Pull every configured worksheet for *country_key* from Google Sheets
    and push its rows into the positionally matched Grist document.

    "new-layout" sheets are split per catalog via process_new_layout_sheet;
    "name-desc-v2" sheets are uploaded as a single table. Per-sheet errors
    are logged and do not abort the remaining sheets.
    """
    if country_key not in COUNTRY_MAPPING:
        print(f"Error: Country '{country_key}' not found.")
        return
    config = COUNTRY_MAPPING[country_key]
    gc = get_gspread_client()
    spreadsheet = gc.open_by_key(config["spreadsheet_id"])
    doc_ids = config.get("grist_doc_id", [])
    # zip() stops at the shorter list, mirroring the old index bound check.
    for sheet_name, doc_id in zip(config["sheets"], doc_ids):
        if not sheet_name:
            continue
        try:
            worksheet = spreadsheet.worksheet(sheet_name)
            all_values = worksheet.get_all_values()
            if not all_values:
                continue
            header, data_rows = all_values[0], all_values[1:]
            lowered = sheet_name.lower()
            if "new-layout" in lowered:
                process_new_layout_sheet(doc_id, header, data_rows)
            elif "name-desc-v2" in lowered:
                upload_to_grist_self_hosted(doc_id, "name-desc-v2", header, data_rows)
        except Exception as e:
            print(f"[{SERVICE_NAME}] Error processing sheet {sheet_name}: {e}")
def process_new_layout_sheet(doc_id, header, data_rows):
    """Split a new-layout sheet into per-catalog groups and upload each.

    Rows whose column A starts with "Name=" open a new group named after
    that marker cell; subsequent rows belong to the most recent group.
    Only non-empty groups are uploaded to Grist.
    """
    groups = {}
    active_name = None
    for row in data_rows:
        first_cell = row[0] if row else ""
        if first_cell.startswith("Name="):
            # Marker row: start collecting rows under this catalog name.
            active_name = first_cell
            groups[active_name] = []
        elif active_name:
            groups[active_name].append(row)
    for table_name, table_rows in groups.items():
        if table_rows:
            upload_to_grist_self_hosted(doc_id, table_name, header, table_rows)
def upload_to_grist_self_hosted(doc_id, table_name, header, rows):
    """Replace the contents of one Grist table with *rows*.

    The table id is derived from *table_name* by replacing every character
    outside [a-zA-Z0-9_] with "_" and upper-casing the first character;
    a leading digit gets a "t_" prefix (Grist ids must be identifier-like).
    Columns are ided "A", "B", ... via get_column_letter and labelled with
    the sheet header text. An existing table has its records deleted first;
    a missing table is created. All record operations are chunked to 500
    rows per request. Errors are logged, never raised.
    """
    clean_table_id = re.sub(r'[^a-zA-Z0-9_]', '_', table_name)
    # NOTE(review): assumes table_name is non-empty — [0] would raise otherwise.
    clean_table_id = clean_table_id[0].upper() + clean_table_id[1:]
    chunk_size = 500
    if clean_table_id[0].isdigit():
        # Grist table ids cannot start with a digit.
        clean_table_id = f"t_{clean_table_id}"
    base_url = f"{GRIST_URL.rstrip('/')}/api/docs/{doc_id}/tables"
    headers = {
        "Authorization": f"Bearer {GRIST_API_KEY}",
        "Content-Type": "application/json"
    }
    # One column per header cell: id "A"/"B"/... with the header text as label.
    column_defs = []
    for i, h_text in enumerate(header):
        col_letter = get_column_letter(i)
        column_defs.append({
            "id": col_letter,
            "fields": {"label": h_text.strip() or col_letter}
        })
    try:
        # A 200 means the table exists, so wipe its rows; anything else → create it.
        check_resp = requests.get(f"{base_url}/{clean_table_id}/records", headers=headers)
        if check_resp.status_code == 200:
            existing = check_resp.json().get("records", [])
            if existing:
                row_ids = [r["id"] for r in existing]
                for i in range(0, len(row_ids), chunk_size):
                    requests.post(f"{base_url}/{clean_table_id}/records/delete",
                                  headers=headers, json=row_ids[i:i+chunk_size])
        else:
            print(f"[{SERVICE_NAME}] Creating table {clean_table_id} with indexed columns...")
            create_resp = requests.post(base_url, headers=headers, json={
                "tables": [{"id": clean_table_id, "columns": column_defs}]
            })
            if create_resp.status_code == 200:
                # Give Grist a moment to materialize the new table before inserts.
                time.sleep(1)
            else:
                print(f"[{SERVICE_NAME}] Create failed: {create_resp.text}")
                return
        # Build {"fields": {"A": ..., "B": ...}} records; cells beyond the
        # header width are dropped, all-empty rows are skipped.
        records_to_add = []
        for row in rows:
            fields = {}
            for i, val in enumerate(row):
                if i < len(column_defs):
                    col_id = column_defs[i]["id"]
                    fields[col_id] = val
            if fields:
                records_to_add.append({"fields": fields})
        if records_to_add:
            for i in range(0, len(records_to_add), chunk_size):
                chunk = records_to_add[i:i+chunk_size]
                res = requests.post(f"{base_url}/{clean_table_id}/records",
                                    headers=headers, json={"records": chunk})
                if res.status_code != 200:
                    print(f"Error adding data to {clean_table_id}: {res.text}")
        print(f"[{SERVICE_NAME}] Synced {len(records_to_add)} rows to {clean_table_id}")
    except Exception as e:
        print(f"[{SERVICE_NAME}] Error on {clean_table_id}: {e}")
"""
Grist [ Sync grist to google sheet ]
"""
def col_to_index(col_str):
    """Map a spreadsheet column name back to its 0-based index.

    'A' -> 0, 'B' -> 1, 'Z' -> 25, 'AA' -> 26 (inverse of get_column_letter).
    """
    value = 0
    # Bijective base-26: each letter contributes (letter value) * 26^position.
    for power, char in enumerate(reversed(col_str)):
        value += (ord(char) - ord('A') + 1) * 26 ** power
    return value - 1
def reconstruct_table_name(t_id):
    """Undo Grist id sanitization to recover the original sheet marker.

    "Name_X_file_y_skt" -> "Name=X,file=y.skt". Only the first occurrence
    of each marker is rewritten; unrelated ids pass through unchanged.
    """
    name = t_id
    prefix = "Name_"
    if name.startswith(prefix):
        name = "Name=" + name[len(prefix):]
    file_marker = "_file_"
    pos = name.find(file_marker)
    if pos != -1:
        name = name[:pos] + ",file=" + name[pos + len(file_marker):]
    suffix = "_skt"
    if name.endswith(suffix):
        name = name[:-len(suffix)] + ".skt"
    return name
def get_all_grist_tables(doc_id):
    """Return the ids of all user tables in the Grist document *doc_id*.

    Internal "Grist*" bookkeeping tables are filtered out. Returns [] on
    any non-200 response.
    """
    url = f"{GRIST_URL.rstrip('/')}/api/docs/{doc_id}/tables"
    headers = {"Authorization": f"Bearer {GRIST_API_KEY}"}
    # Timeout added so a stalled Grist server cannot block the sync forever.
    resp = requests.get(url, headers=headers, timeout=30)
    if resp.status_code == 200:
        tables = resp.json().get("tables", [])
        return [t["id"] for t in tables if not t["id"].startswith("Grist")]
    return []
def fetch_grist_table_data(doc_id, table_id):
    """Fetch all records of a Grist table as spreadsheet-style rows.

    Each record's letter-named fields ("A", "B", ... "AA") are placed at
    their column index; other field keys (formula/meta columns) are
    ignored. Values are stringified, with None becoming "". Returns a list
    of rows, or [] on any HTTP error.
    """
    url = f"{GRIST_URL.rstrip('/')}/api/docs/{doc_id}/tables/{table_id}/records"
    headers = {"Authorization": f"Bearer {GRIST_API_KEY}"}
    # Timeout added so a stalled Grist server cannot hang the caller forever.
    resp = requests.get(url, headers=headers, timeout=30)
    if resp.status_code != 200:
        print(f"Failed to fetch table {table_id}: {resp.text}")
        return []
    records = resp.json().get("records", [])
    if not records:
        return []
    # Hoisted out of the per-record loop: was re.match(...) per field key.
    col_pattern = re.compile(r'^[A-Z]+$')
    parsed_rows = []
    for r in records:
        fields = r.get("fields", {})
        # Size the row to the right-most letter-named column present.
        max_idx = -1
        for k in fields.keys():
            if col_pattern.match(k):
                max_idx = max(max_idx, col_to_index(k))
        row = [""] * (max_idx + 1)
        for k, v in fields.items():
            if col_pattern.match(k):
                row[col_to_index(k)] = str(v) if v is not None else ""
        parsed_rows.append(row)
    return parsed_rows
def sync_grist_to_sheets(country_key):
    """Push Grist data back into the configured Google Sheets for *country_key*.

    Each Grist doc is paired positionally with a sheet name from
    COUNTRY_MAPPING. "name-desc-v2" sheets mirror the single Name_desc_v2
    table; "new-layout" sheets are rebuilt from every catalog table in the
    doc, with the reconstructed table name written as a separator row
    before each table's rows. Existing content from row 2 down is cleared
    before writing; per-sheet errors are logged and do not abort the rest.
    """
    if country_key not in COUNTRY_MAPPING:
        print(f"Error: Country '{country_key}' not found.")
        return
    config = COUNTRY_MAPPING[country_key]
    spreadsheet_id = config["spreadsheet_id"]
    sheet_names = config["sheets"]
    grist_doc_ids = config.get("grist_doc_id", [])
    gc = get_gspread_client()
    spreadsheet = gc.open_by_key(spreadsheet_id)
    for index, sheet_name in enumerate(sheet_names):
        # Sheets and Grist docs are matched by position; skip unmapped entries.
        if not sheet_name or index >= len(grist_doc_ids):
            continue
        doc_id = grist_doc_ids[index]
        if not doc_id:
            continue
        try:
            worksheet = spreadsheet.worksheet(sheet_name)
            # Case Name-desc-v2: one fixed table, copied verbatim.
            if "name-desc-v2" in sheet_name.lower():
                # print(f"Fetching Name_desc_v2 from doc: {doc_id}...")
                rows = fetch_grist_table_data(doc_id, "Name_desc_v2")
                if rows:
                    worksheet.batch_clear(["A2:ZZ"])
                    worksheet.update(values=rows, range_name="A2")
                    print(f"[{SERVICE_NAME}] Updated {len(rows)} rows to sheet '{sheet_name}'.")
            # Case New-layout / New-layout-v2: concatenate every catalog table.
            elif "new-layout" in sheet_name.lower():
                print(f"Fetching multiple tables from doc: {doc_id}...")
                all_tables = get_all_grist_tables(doc_id)
                all_new_rows = []
                for t_id in all_tables:
                    # The shared name-desc table lives in the same doc; skip it here.
                    if t_id.lower() == "name_desc_v2":
                        continue
                    rows = fetch_grist_table_data(doc_id, t_id)
                    if rows:
                        # Separator row carrying the original "Name=...,file=..." label.
                        original_name = reconstruct_table_name(t_id)
                        all_new_rows.append([original_name])
                        all_new_rows.extend(rows)
                if all_new_rows:
                    worksheet.batch_clear(["A2:ZZ"])
                    worksheet.update(values=all_new_rows, range_name="A2")
                    print(f"[{SERVICE_NAME}] Updated {len(all_new_rows)} lines (incl. Table Names) to sheet '{sheet_name}'.")
                else:
                    print(f"[{SERVICE_NAME}] No data found in any tables for sheet '{sheet_name}'.")
        except Exception as e:
            print(f"[{SERVICE_NAME}] Error syncing to sheet {sheet_name}: {e}")