Compare commits
6 commits
e05a62cda9
...
8252b6fcf0
| Author | SHA1 | Date | |
|---|---|---|---|
| 8252b6fcf0 | |||
| c0539bcf59 | |||
| 7ee40ccad7 | |||
| 69be5a5493 | |||
| 4fbf2abd08 | |||
| dbc921d98a |
38 changed files with 2143 additions and 375 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
|
@ -1,4 +1,5 @@
|
|||
todo.md
|
||||
.env
|
||||
|
||||
Language*Learning*API/
|
||||
/Language*Learning*API/
|
||||
__pycache__/
|
||||
30
README.md
30
README.md
|
|
@ -1,18 +1,38 @@
|
|||
# Language learning app
|
||||
|
||||
## Description
|
||||
Language Learning App is a set of software packages that deliver a language learning experience for second+ language learners to help boost fluency through exposure to realistic-looking text.
|
||||
|
||||
This is an app designed to help people learn a second(+) language. Initially from English. The app will start with French, Spanish, Italian, and German as the target languages. With English as the only source language.
|
||||
## Thesis statements
|
||||
|
||||
The thesis of the app is that spaced repetition and recall remain effective mechanisms for language acquisition, that exposure to appropriate, realistic text can make that process less repetitive or dull, and that mixing text and audio allows the user to know how words "sound".
|
||||
1. Presentation of novel, realistic looking text and accompanying audio provide an engaging, motivating chance for language learners to increase their fluency of another language.
|
||||
|
||||
2. Interacting with a language in non-short-form text (i.e. more than one sentence at a time) is more complex, and more beneficial, than interacting with single sentences.
|
||||
|
||||
3. Language learning should focus on the most "useful" words first. Traditional grouping of words (e.g. items of clothing, hobbies) used in traditional education are too abstract. Providing some level of personalisation of topic, and selection of words is important to engagement and motivation.
|
||||
|
||||
## Description of product
|
||||
|
||||
Although spaced repetition is an effective mechanism to better remember words, showing words in context remains an important "before" step. This app adds value by providing the user with realistic-looking written and audio content in the language(s) they are learning at an appropriate level. From there, the user can identify vocabulary that they are unfamiliar with, and would like to commit to memory.
|
||||
|
||||
Additionally, Language Learning App treats the text-audio pair as important. Language learners don't just want to be able to read and write a language, they need to know how words sound.
|
||||
|
||||
At present, the app doesn't have a solution to recognising speech, another important part of language learning.
|
||||
|
||||
It improves learner proficiency by building a mechanism for generating realistic, level-appropriate text from user-specified inputs, generated by LLMs. There will also be an audio medium, similar to short podcasts, generated from the text.
|
||||
## Technical Specifics
|
||||
|
||||
This is an app designed to help people learn a second(+) language. Initially from English. The app will start with French, Spanish, Italian, and German as the target languages. With English as the only source language.
|
||||
|
||||
The application has a back-end written in python (fastapi), because of the Python ecosystem around data and machine learning.
|
||||
|
||||
The application will have a web front end written in Svelte Kit. It will adopt progressive web app standards, to allow offline use.
|
||||
In the future, the API will generate XML endpoints for podcast-playing app integration (as it's an audio-first medium).
|
||||
|
||||
The application has a web-based front end written in Svelte Kit. It will adopt progressive web app standards, to allow offline use. Due to technical complexity, and limitations, there are no plans for native app development.
|
||||
|
||||
The app relies on containerisation and docker to orchestrate moving parts. In production, there will be a need to consider Content Delivery Networks (CDNs) as high bandwidth is expected.
|
||||
|
||||
Content generation relies heavily on asynchronous jobs.
|
||||
|
||||
The app should rely on self-hostable infrastructure as much as possible. Vendor-specific queueing and messaging protocols (e.g. AWS's SNS) are a liability.
|
||||
|
||||
Communication between the front end and the back end is over HTTP, authenticated with JWT tokens.
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,43 @@
|
|||
"""add translated_articles table
|
||||
|
||||
Revision ID: 0005
|
||||
Revises: 0004
|
||||
Create Date: 2026-03-27
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
revision: str = "0005"
|
||||
down_revision: Union[str, None] = "0004"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the translated_articles table and its two query indexes."""
    op.create_table(
        "translated_articles",
        sa.Column("id", postgresql.UUID(as_uuid=True), primary_key=True),
        sa.Column("published_at", sa.DateTime(timezone=True), nullable=False),
        # Language pair and the CEFR levels the article targets.
        sa.Column("source_language", sa.String(10), nullable=False),
        sa.Column("source_title", sa.Text(), nullable=False),
        sa.Column("source_body", sa.Text(), nullable=False),
        sa.Column("target_language", sa.String(10), nullable=False),
        sa.Column("target_complexities", postgresql.ARRAY(sa.String(5)), nullable=False),
        sa.Column("target_title", sa.Text(), nullable=False),
        sa.Column("target_body", sa.Text(), nullable=False),
        # Audio and analysis artefacts are filled in later, hence nullable.
        sa.Column("audio_url", sa.Text(), nullable=True),
        sa.Column("target_body_pos", postgresql.JSONB(), nullable=True),
        sa.Column("target_body_transcript", postgresql.JSONB(), nullable=True),
    )
    # Indexes backing the two main read paths: newest-first listing and
    # filtering by the language being learned.
    for index_name, index_columns in (
        ("ix_translated_articles_published_at", ["published_at"]),
        ("ix_translated_articles_target_language", ["target_language"]),
    ):
        op.create_index(index_name, "translated_articles", index_columns)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Reverse upgrade(): drop the indexes first, then the table."""
    for index_name in (
        "ix_translated_articles_target_language",
        "ix_translated_articles_published_at",
    ):
        op.drop_index(index_name, table_name="translated_articles")
    op.drop_table("translated_articles")
|
||||
|
|
@ -0,0 +1,74 @@
|
|||
"""separate job orchestration from article content
|
||||
|
||||
Revision ID: 0006
|
||||
Revises: 0005
|
||||
Create Date: 2026-03-29
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
revision: str = "0006"
|
||||
down_revision: Union[str, None] = "0005"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Move article content off jobs and onto translated_articles.

    Article content columns become nullable (they are now populated
    incrementally by the pipeline), jobs gain a FK to the article they
    produce, and the duplicated content columns on jobs are dropped.
    """
    # Content is filled in step-by-step, so it can no longer be NOT NULL.
    for column_name in ("source_title", "source_body", "target_title", "target_body"):
        op.alter_column("translated_articles", column_name, nullable=True)

    # POS data for the source text now lives alongside the article content.
    op.add_column(
        "translated_articles",
        sa.Column("source_body_pos", postgresql.JSONB(), nullable=True),
    )

    # Each job now points at the article it produces.
    op.add_column(
        "jobs",
        sa.Column(
            "translated_article_id",
            postgresql.UUID(as_uuid=True),
            sa.ForeignKey("translated_articles.id"),
            nullable=True,
        ),
    )

    # These content columns were migrated onto translated_articles.
    for column_name in (
        "source_language",
        "target_language",
        "complexity_level",
        "input_summary",
        "generated_text",
        "translated_text",
        "audio_url",
        "source_pos_data",
        "target_pos_data",
        "audio_transcript",
    ):
        op.drop_column("jobs", column_name)
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Reverse upgrade(): restore job content columns, drop the FK, and
    re-tighten the article content columns.

    NOT NULL columns are restored with server defaults so the migration can
    run against a jobs table that already contains rows.
    """
    op.add_column("jobs", sa.Column("audio_transcript", postgresql.JSONB(), nullable=True))
    op.add_column("jobs", sa.Column("target_pos_data", postgresql.JSONB(), nullable=True))
    op.add_column("jobs", sa.Column("source_pos_data", postgresql.JSONB(), nullable=True))
    op.add_column("jobs", sa.Column("audio_url", sa.Text(), nullable=True))
    op.add_column("jobs", sa.Column("translated_text", sa.Text(), nullable=True))
    op.add_column("jobs", sa.Column("generated_text", sa.Text(), nullable=True))
    op.add_column("jobs", sa.Column("input_summary", sa.Text(), nullable=True))
    op.add_column("jobs", sa.Column("complexity_level", sa.String(5), nullable=False, server_default="B1"))
    op.add_column("jobs", sa.Column("target_language", sa.String(10), nullable=False, server_default="fr"))
    op.add_column("jobs", sa.Column("source_language", sa.String(10), nullable=False, server_default="en"))

    op.drop_column("jobs", "translated_article_id")

    # BUG FIX: upgrade() also added this column to translated_articles, but
    # the previous downgrade never removed it, so downgrade was not a full
    # reversal of the schema change.
    op.drop_column("translated_articles", "source_body_pos")

    # NOTE(review): these will fail if any row still holds NULLs in the
    # content columns — assumes downgrade only runs on empty/complete data.
    op.alter_column("translated_articles", "target_body", nullable=False)
    op.alter_column("translated_articles", "target_title", nullable=False)
    op.alter_column("translated_articles", "source_body", nullable=False)
    op.alter_column("translated_articles", "source_title", nullable=False)
|
||||
|
|
@ -9,6 +9,7 @@ class Settings(BaseSettings):
|
|||
deepgram_api_key: str
|
||||
gemini_api_key: str
|
||||
admin_user_emails: str = "" # comma-separated list of admin email addresses
|
||||
api_base_url: str = "http://localhost:8000"
|
||||
storage_endpoint_url: str
|
||||
storage_access_key: str
|
||||
storage_secret_key: str
|
||||
|
|
|
|||
|
|
@ -7,17 +7,8 @@ class SummariseJob:
|
|||
id: str
|
||||
user_id: str
|
||||
status: str
|
||||
source_language: str
|
||||
target_language: str
|
||||
complexity_level: str
|
||||
input_summary: str
|
||||
generated_text: str
|
||||
translated_text: str
|
||||
error_message: str
|
||||
audio_url: str
|
||||
source_pos_data: dict | None
|
||||
target_pos_data: dict | None
|
||||
audio_transcript: dict | None
|
||||
translated_article_id: str | None
|
||||
error_message: str | None
|
||||
created_at: datetime
|
||||
started_at: datetime | None = None
|
||||
completed_at: datetime | None = None
|
||||
|
|
|
|||
|
|
@ -1,15 +1,20 @@
|
|||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
@dataclass
|
||||
class TranslatedArticle:
|
||||
id: str
|
||||
|
||||
source_lang: str
|
||||
source_title: str
|
||||
source_text: str
|
||||
|
||||
target_lang: str
|
||||
target_title: str
|
||||
target_text: str
|
||||
|
||||
|
||||
published_at: datetime
|
||||
source_language: str
|
||||
target_language: str
|
||||
target_complexities: list[str]
|
||||
# Content fields — filled in step-by-step during generation
|
||||
source_title: str | None
|
||||
source_body: str | None
|
||||
source_body_pos: dict | None
|
||||
target_title: str | None
|
||||
target_body: str | None
|
||||
audio_url: str | None
|
||||
target_body_pos: dict | None
|
||||
target_body_transcript: dict | None
|
||||
|
|
|
|||
|
|
@ -1,30 +0,0 @@
|
|||
import re
|
||||
|
||||
from ..models.summarise_job import SummariseJob
|
||||
from ..models.translated_article import TranslatedArticle
|
||||
|
||||
def first_heading(md: str) -> str | None:
|
||||
m = re.search(r'^#{1,2}\s+(.+)', md, re.MULTILINE)
|
||||
return m.group(1).strip() if m else None
|
||||
|
||||
class ArticleService:
    """Builds TranslatedArticle read models from stored summarise jobs."""

    def __init__(self, summarise_job_repository):
        self.summarise_job_repository = summarise_job_repository

    async def get_all_articles(self) -> list[TranslatedArticle]:
        """Return every summarise job converted into an article.

        BUG FIX: the previous code called ``.map(...)`` on the repository
        result, but Python lists have no ``map`` method — this raised
        AttributeError at runtime. A list comprehension is used instead.
        """
        summarise_jobs = await self.summarise_job_repository.list_all()
        return [
            self.summarise_job_to_translated_article(job)
            for job in summarise_jobs
        ]

    def summarise_job_to_translated_article(
        self,
        summarise_job: SummariseJob,
    ) -> TranslatedArticle:
        """Map a job onto an article, swapping the language direction.

        The job's target language is the language the article text was
        generated in, so job target -> article source and vice versa.
        """
        return TranslatedArticle(
            id=summarise_job.id,
            source_lang=summarise_job.target_language,
            source_title=first_heading(summarise_job.translated_text) or "",
            source_text=summarise_job.translated_text,
            target_lang=summarise_job.source_language,
            target_title=first_heading(summarise_job.generated_text) or "",
            target_text=summarise_job.generated_text,
        )
|
||||
149
api/app/domain/services/summarise_service.py
Normal file
149
api/app/domain/services/summarise_service.py
Normal file
|
|
@ -0,0 +1,149 @@
|
|||
import asyncio
|
||||
import random
|
||||
import re
|
||||
import uuid
|
||||
from typing import Any, Callable, Coroutine
|
||||
|
||||
import anthropic
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from ...outbound.postgres.repositories import summarise_job_repository
|
||||
from ...outbound.postgres.repositories.translated_article_repository import TranslatedArticleRepository
|
||||
from ...outbound.anthropic.anthropic_client import AnthropicClient
|
||||
from ...outbound.deepgram.deepgram_client import LocalDeepgramClient
|
||||
from ...outbound.deepl.deepl_client import DeepLClient
|
||||
from ...outbound.gemini.gemini_client import GeminiClient
|
||||
from ...outbound.spacy.spacy_client import SpacyClient
|
||||
from ...storage import upload_audio
|
||||
from ...languages import SUPPORTED_LANGUAGES
|
||||
|
||||
|
||||
|
||||
# Anthropic error types worth retrying: rate limits plus transient
# server/network conditions.
_ANTHROPIC_RETRYABLE = (
    anthropic.RateLimitError,
    anthropic.InternalServerError,
    anthropic.APITimeoutError,
    anthropic.APIConnectionError,
)

# Backoff tuning for _anthropic_with_backoff.
_MAX_RETRIES = 4    # retries after the first attempt (5 calls in total)
_BASE_DELAY = 1.0   # seconds; doubled on each successive attempt
_MAX_DELAY = 60.0   # upper bound on the computed delay
|
||||
|
||||
|
||||
async def _anthropic_with_backoff(
    coro_fn: Callable[..., Coroutine[Any, Any, Any]],
    *args: Any,
    **kwargs: Any,
) -> Any:
    """Await ``coro_fn(*args, **kwargs)``, retrying transient Anthropic errors.

    Retries up to ``_MAX_RETRIES`` times on the types in
    ``_ANTHROPIC_RETRYABLE``. The delay honours a numeric ``retry-after``
    response header on rate-limit errors, otherwise uses capped exponential
    backoff; +/-20% jitter is applied either way.

    Raises the last Anthropic error when every attempt fails.
    """
    for attempt in range(_MAX_RETRIES + 1):
        try:
            return await coro_fn(*args, **kwargs)
        except _ANTHROPIC_RETRYABLE as exc:
            if attempt == _MAX_RETRIES:
                raise

            retry_after: float | None = None
            if isinstance(exc, anthropic.RateLimitError):
                raw = exc.response.headers.get("retry-after")
                if raw is not None:
                    # BUG FIX: retry-after may be an HTTP-date or otherwise
                    # non-numeric; a bare float(raw) raised ValueError and
                    # escaped the retry loop entirely. Fall back to
                    # exponential backoff instead.
                    try:
                        retry_after = float(raw)
                    except (TypeError, ValueError):
                        retry_after = None
            if retry_after is None:
                retry_after = min(_BASE_DELAY * (2 ** attempt), _MAX_DELAY)

            # +/-20% jitter so concurrent workers don't retry in lockstep.
            jittered = retry_after * (0.8 + random.random() * 0.4)
            await asyncio.sleep(jittered)
|
||||
|
||||
|
||||
class SummariseService:
|
||||
def __init__(
|
||||
self,
|
||||
anthropic_client: AnthropicClient,
|
||||
deepgram_client: LocalDeepgramClient,
|
||||
deepl_client: DeepLClient,
|
||||
gemini_client: GeminiClient,
|
||||
spacy_client: SpacyClient,
|
||||
) -> None:
|
||||
self.anthropic_client = anthropic_client
|
||||
self.deepgram_client = deepgram_client
|
||||
self.deepl_client = deepl_client
|
||||
self.gemini_client = gemini_client
|
||||
self.spacy_client = spacy_client
|
||||
|
||||
def _first_heading(self, md: str) -> str | None:
|
||||
m = re.search(r'^#{1,2}\s+(.+)', md, re.MULTILINE)
|
||||
return m.group(1).strip() if m else None
|
||||
|
||||
def _split_title_and_body(self, text: str) -> tuple[str, str]:
|
||||
"""Splits the text into a title (first heading) and body (the rest)."""
|
||||
title = self._first_heading(text) or ""
|
||||
body = text[len(title):].lstrip() if title else text
|
||||
if title == "":
|
||||
title = "Untitled Article"
|
||||
|
||||
return title, body
|
||||
|
||||
async def run(
|
||||
self,
|
||||
db: AsyncSession,
|
||||
job_id: uuid.UUID,
|
||||
article_id: uuid.UUID,
|
||||
source_language: str,
|
||||
target_language: str,
|
||||
complexity_level: str,
|
||||
input_texts: list[str],
|
||||
) -> None:
|
||||
article_repo = TranslatedArticleRepository(db)
|
||||
job = await summarise_job_repository.get_by_id(db, job_id)
|
||||
await summarise_job_repository.mark_processing(db, job)
|
||||
|
||||
try:
|
||||
language_name = SUPPORTED_LANGUAGES[target_language]
|
||||
source_material = "\n\n".join(input_texts[:3])
|
||||
|
||||
generated_text = await _anthropic_with_backoff(
|
||||
self.anthropic_client.generate_summary_text,
|
||||
content_to_summarise=source_material,
|
||||
complexity_level=complexity_level,
|
||||
from_language=language_name,
|
||||
to_language=language_name,
|
||||
length_preference="200-400 words",
|
||||
)
|
||||
|
||||
generated_title, generated_text_without_title = self._split_title_and_body(generated_text)
|
||||
|
||||
await article_repo.update_content(
|
||||
article_id,
|
||||
target_title=generated_title,
|
||||
target_body=generated_text_without_title,
|
||||
source_title="",
|
||||
source_body="",
|
||||
)
|
||||
|
||||
translated_text = await self.deepl_client.translate(generated_text, source_language)
|
||||
|
||||
translated_title, translated_text_without_title = self._split_title_and_body(translated_text)
|
||||
|
||||
await article_repo.update_content(
|
||||
article_id,
|
||||
target_title=generated_title,
|
||||
target_body=generated_text_without_title,
|
||||
source_title=translated_title,
|
||||
source_body=translated_text_without_title,
|
||||
)
|
||||
|
||||
target_pos_data = self.spacy_client.get_parts_of_speech(generated_text_without_title, target_language)
|
||||
source_pos_data = self.spacy_client.get_parts_of_speech(translated_text_without_title, source_language)
|
||||
|
||||
await article_repo.update_pos(article_id, target_pos_data, source_pos_data)
|
||||
|
||||
voice = self.gemini_client.get_voice_by_language(target_language)
|
||||
wav_bytes = await self.gemini_client.generate_audio(generated_text, voice)
|
||||
audio_key = f"audio/{job_id}.wav"
|
||||
upload_audio(audio_key, wav_bytes)
|
||||
|
||||
transcript = await self.deepgram_client.transcribe_bytes(wav_bytes, target_language)
|
||||
|
||||
await article_repo.update_audio(article_id, audio_key, transcript)
|
||||
|
||||
await summarise_job_repository.mark_succeeded(db, job)
|
||||
|
||||
except Exception as exc:
|
||||
await summarise_job_repository.mark_failed(db, job, str(exc))
|
||||
|
|
@ -19,15 +19,15 @@ class AnthropicClient():
|
|||
) -> str:
|
||||
return (
|
||||
f"You are a language learning content creator.\n"
|
||||
f"You generate markdown summaries of user-provided content.\n"
|
||||
f"You generate original, level-appropriate content from a source.\n"
|
||||
f"The content will be spoken aloud in {to_language}, write it accordingly.\n"
|
||||
f"You will provide content in {to_language} at {complexity_level} proficiency level on the CEFR scale.\n"
|
||||
f"The text you generate will:\n"
|
||||
f"- Contain ONLY the generated summary text in {to_language}.\n"
|
||||
f"- Never contain inappropriate (hateful, sexual, violent) content. It is preferable to return no text than to generate such content.\n"
|
||||
f"- Speak directly to the reader/listener, adopting the tone and style of a semi-formal news reporter or podcaster.\n"
|
||||
f"- Where appropriate (fluency level, content), use a small number of idiomatic expressions.\n"
|
||||
f"- Where appropriate use at least one additional tense, beyond the default of the content.\n"
|
||||
f"- Be formatted in markdown. Contain a single level 1 header (#) at the top, followed by paragraphs and line breaks.\n"
|
||||
f"- Occasionally, where natural, include idiomatic expressions appropriate to {complexity_level} level.\n"
|
||||
f"- Vary tense usage naturally — do not restrict the piece to a single tense.\n"
|
||||
f"- Contain only plain text. The piece should start with a title prefaced like a level-1 markdown title (#), but all other text should be plain. \n"
|
||||
f"- Be around {length_preference} long.\n"
|
||||
f"- Be inspired by the content, but not the tone, of the source material."
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
import asyncio
|
||||
import json
|
||||
|
||||
from deepgram import (
|
||||
AsyncDeepgramClient,
|
||||
)
|
||||
|
|
@ -15,7 +16,7 @@ class LocalDeepgramClient:
|
|||
utterances=True,
|
||||
smart_format=True,
|
||||
)
|
||||
return response.results.json()
|
||||
return json.loads(response.results.json())
|
||||
|
||||
async def transcribe_local_file(self, local_file_path: str, language_code: str) -> dict:
|
||||
with open(local_file_path, "rb") as audio_file:
|
||||
|
|
|
|||
|
|
@ -3,10 +3,11 @@ from datetime import datetime, timezone
|
|||
|
||||
from sqlalchemy import String, Text, DateTime, ForeignKey
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.dialects.postgresql import UUID, JSONB
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
from ..database import Base
|
||||
|
||||
|
||||
class SummariseJobEntity(Base):
|
||||
__tablename__ = "jobs"
|
||||
|
||||
|
|
@ -17,17 +18,10 @@ class SummariseJobEntity(Base):
|
|||
UUID(as_uuid=True), ForeignKey("users.id"), nullable=True, index=True
|
||||
)
|
||||
status: Mapped[str] = mapped_column(String(20), nullable=False, default="pending")
|
||||
source_language: Mapped[str] = mapped_column(String(10), nullable=False, default="en")
|
||||
target_language: Mapped[str] = mapped_column(String(10), nullable=False)
|
||||
complexity_level: Mapped[str] = mapped_column(String(5), nullable=False)
|
||||
input_summary: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
generated_text: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
translated_text: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
translated_article_id: Mapped[uuid.UUID | None] = mapped_column(
|
||||
UUID(as_uuid=True), ForeignKey("translated_articles.id"), nullable=True
|
||||
)
|
||||
error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
audio_url: Mapped[str | None] = mapped_column(Text, nullable=True)
|
||||
source_pos_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
target_pos_data: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
audio_transcript: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
created_at: Mapped[datetime] = mapped_column(
|
||||
DateTime(timezone=True),
|
||||
default=lambda: datetime.now(timezone.utc),
|
||||
|
|
|
|||
|
|
@ -0,0 +1,32 @@
|
|||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import String, Text, DateTime
|
||||
from sqlalchemy.orm import Mapped, mapped_column
|
||||
from sqlalchemy.dialects.postgresql import UUID, ARRAY, JSONB
|
||||
|
||||
from ..database import Base
|
||||
|
||||
|
||||
class TranslatedArticleEntity(Base):
    """ORM row for one generated article and its translation artefacts."""

    __tablename__ = "translated_articles"

    id: Mapped[uuid.UUID] = mapped_column(
        UUID(as_uuid=True), primary_key=True, default=uuid.uuid4
    )
    published_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
    )

    # Language pair and the complexity levels this article targets.
    source_language: Mapped[str] = mapped_column(String(10), nullable=False)
    target_language: Mapped[str] = mapped_column(String(10), nullable=False)
    target_complexities: Mapped[list[str]] = mapped_column(ARRAY(String(5)), nullable=False)

    # Content fields are nullable on purpose: the generation pipeline fills
    # them in step-by-step as each stage completes.
    source_title: Mapped[str | None] = mapped_column(Text, nullable=True)
    source_body: Mapped[str | None] = mapped_column(Text, nullable=True)
    source_body_pos: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    target_title: Mapped[str | None] = mapped_column(Text, nullable=True)
    target_body: Mapped[str | None] = mapped_column(Text, nullable=True)
    audio_url: Mapped[str | None] = mapped_column(Text, nullable=True)
    target_body_pos: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
    target_body_transcript: Mapped[dict | None] = mapped_column(JSONB, nullable=True)
|
||||
|
|
@ -7,69 +7,32 @@ from sqlalchemy.ext.asyncio import AsyncSession
|
|||
from ..entities.summarise_job_entity import SummariseJobEntity
|
||||
from ....domain.models.summarise_job import SummariseJob
|
||||
|
||||
class PostgresSummariseJobRepository:
|
||||
def __init__(self, db: AsyncSession):
|
||||
self.db = db
|
||||
|
||||
async def list_all(self) -> list[SummariseJob]:
|
||||
result = self.db.execute(
|
||||
select(SummariseJobEntity).order_by(SummariseJobEntity.created_at.desc())
|
||||
)
|
||||
def _to_model(entity: SummariseJobEntity) -> SummariseJob:
    """Map an ORM job row onto the domain SummariseJob dataclass."""
    article_id = entity.translated_article_id
    return SummariseJob(
        id=str(entity.id),
        user_id=str(entity.user_id),
        status=entity.status,
        # UUID -> str, preserving None when no article is linked yet.
        translated_article_id=str(article_id) if article_id else None,
        error_message=entity.error_message,
        created_at=entity.created_at,
        started_at=entity.started_at,
        completed_at=entity.completed_at,
    )
|
||||
|
||||
return list(result.scalars().all()).map(self.entity_to_model)
|
||||
|
||||
async def get_by_audio_url(
|
||||
self,
|
||||
audio_url: str
|
||||
) -> SummariseJob | None:
|
||||
result = await self.db.execute(
|
||||
select(SummariseJobEntity).where(
|
||||
SummariseJobEntity.audio_url == audio_url
|
||||
)
|
||||
)
|
||||
|
||||
return self.entity_to_model(result.scalar_one_or_none())
|
||||
|
||||
def entity_to_model(self, entity: SummariseJobEntity | None) -> SummariseJob:
|
||||
if entity is None:
|
||||
return None
|
||||
|
||||
return SummariseJob(
|
||||
id=str(entity.id),
|
||||
user_id=str(entity.user_id),
|
||||
status=entity.status,
|
||||
source_language=entity.source_language,
|
||||
target_language=entity.target_language,
|
||||
complexity_level=entity.complexity_level,
|
||||
input_summary=entity.input_summary,
|
||||
generated_text=entity.generated_text,
|
||||
translated_text=entity.translated_text,
|
||||
error_message=entity.error_message,
|
||||
audio_url=entity.audio_url,
|
||||
source_pos_data=entity.source_pos_data,
|
||||
target_pos_data=entity.target_pos_data,
|
||||
audio_transcript=entity.audio_transcript,
|
||||
created_at=entity.created_at,
|
||||
started_at=entity.started_at,
|
||||
completed_at=entity.completed_at,
|
||||
)
|
||||
|
||||
async def update(db: AsyncSession, job: SummariseJobEntity) -> None:
|
||||
async def _commit(db: AsyncSession) -> None:
    """Commit any pending changes on the session."""
    await db.commit()
|
||||
|
||||
|
||||
async def create(
|
||||
db: AsyncSession,
|
||||
user_id: uuid.UUID,
|
||||
source_language: str,
|
||||
target_language: str,
|
||||
complexity_level: str,
|
||||
translated_article_id: uuid.UUID,
|
||||
) -> SummariseJobEntity:
|
||||
job = SummariseJobEntity(
|
||||
user_id=user_id,
|
||||
source_language=source_language,
|
||||
target_language=target_language,
|
||||
complexity_level=complexity_level,
|
||||
translated_article_id=translated_article_id,
|
||||
)
|
||||
db.add(job)
|
||||
await db.commit()
|
||||
|
|
@ -92,58 +55,17 @@ async def mark_processing(db: AsyncSession, job: SummariseJobEntity) -> None:
|
|||
job.status = "processing"
|
||||
job.started_at = datetime.now(timezone.utc)
|
||||
job.error_message = None
|
||||
await update(db, job)
|
||||
await _commit(db)
|
||||
|
||||
|
||||
async def save_generated_text(
|
||||
db: AsyncSession,
|
||||
job: SummariseJobEntity,
|
||||
generated_text: str,
|
||||
input_summary: str,
|
||||
) -> None:
|
||||
job.generated_text = generated_text
|
||||
job.input_summary = input_summary
|
||||
await update(db, job)
|
||||
|
||||
|
||||
async def save_translated_text(
|
||||
db: AsyncSession,
|
||||
job: SummariseJobEntity,
|
||||
translated_text: str,
|
||||
) -> None:
|
||||
job.translated_text = translated_text
|
||||
await update(db, job)
|
||||
|
||||
|
||||
async def save_pos_data(
|
||||
db: AsyncSession,
|
||||
job: SummariseJobEntity,
|
||||
source_pos_data: dict,
|
||||
target_pos_data: dict,
|
||||
) -> None:
|
||||
job.source_pos_data = source_pos_data
|
||||
job.target_pos_data = target_pos_data
|
||||
await update(db, job)
|
||||
|
||||
|
||||
async def save_audio_transcript(
|
||||
db: AsyncSession,
|
||||
job: SummariseJobEntity,
|
||||
audio_transcript: dict,
|
||||
) -> None:
|
||||
job.audio_transcript = audio_transcript
|
||||
await update(db, job)
|
||||
|
||||
|
||||
async def mark_succeeded(db: AsyncSession, job: SummariseJobEntity) -> None:
    """Record successful completion on the job and persist it.

    FIX: this span contained two fused variants of the function (the removed
    one still took an ``audio_url`` parameter and called the old ``update``
    helper); only the post-change definition is kept — audio now lives on
    the article, not the job.
    """
    job.status = "succeeded"
    job.completed_at = datetime.now(timezone.utc)
    await _commit(db)
|
||||
|
||||
|
||||
async def mark_failed(db: AsyncSession, job: SummariseJobEntity, error: str) -> None:
    """Record failure and its error message on the job, then persist it.

    FIX: the span held both the old commit call (``await update(db, job)``)
    and the new one; only the post-change ``_commit`` path is kept.
    """
    job.status = "failed"
    job.error_message = error
    job.completed_at = datetime.now(timezone.utc)
    await _commit(db)
|
||||
|
|
|
|||
|
|
@ -0,0 +1,125 @@
|
|||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from ..entities.translated_article_entity import TranslatedArticleEntity
|
||||
from ..entities.summarise_job_entity import SummariseJobEntity
|
||||
from ....domain.models.translated_article import TranslatedArticle
|
||||
|
||||
|
||||
class TranslatedArticleRepository:
|
||||
def __init__(self, db: AsyncSession):
    """Bind the repository to one async database session."""
    self.db = db
|
||||
|
||||
def _to_model(self, entity: TranslatedArticleEntity) -> TranslatedArticle:
    """Convert an ORM row into the TranslatedArticle domain model."""
    # Most fields copy across one-to-one; collect them by name.
    passthrough = {
        name: getattr(entity, name)
        for name in (
            "published_at",
            "source_language",
            "target_language",
            "source_title",
            "source_body",
            "source_body_pos",
            "target_title",
            "target_body",
            "audio_url",
            "target_body_pos",
            "target_body_transcript",
        )
    }
    return TranslatedArticle(
        id=str(entity.id),
        # Copy so the model does not alias the ORM-managed list.
        target_complexities=list(entity.target_complexities),
        **passthrough,
    )
|
||||
|
||||
async def create(
    self,
    source_language: str,
    target_language: str,
    target_complexities: list[str],
) -> TranslatedArticle:
    """Insert a new, content-less article shell and return its model.

    Content fields start NULL; the generation pipeline fills them later.
    """
    row = TranslatedArticleEntity(
        published_at=datetime.now(timezone.utc),
        source_language=source_language,
        target_language=target_language,
        target_complexities=target_complexities,
    )
    self.db.add(row)
    await self.db.commit()
    # Refresh so DB-generated defaults (e.g. the id) are populated.
    await self.db.refresh(row)
    return self._to_model(row)
|
||||
|
||||
async def update_content(
    self,
    article_id: uuid.UUID,
    target_title: str,
    target_body: str,
    source_title: str,
    source_body: str,
) -> None:
    """Write both language versions of title/body onto the article row."""
    # NOTE(review): db.get returns None for an unknown id, which would raise
    # AttributeError below — assumes callers always pass a valid article_id.
    row = await self.db.get(TranslatedArticleEntity, article_id)
    row.target_title = target_title
    row.target_body = target_body
    row.source_title = source_title
    row.source_body = source_body
    await self.db.commit()
|
||||
|
||||
async def update_pos(self, article_id: uuid.UUID, target_body_pos: dict, source_body_pos: dict) -> None:
    """Attach part-of-speech payloads for both language versions."""
    row = await self.db.get(TranslatedArticleEntity, article_id)
    row.target_body_pos = target_body_pos
    row.source_body_pos = source_body_pos
    await self.db.commit()
|
||||
|
||||
async def update_audio(
|
||||
self,
|
||||
article_id: uuid.UUID,
|
||||
audio_url: str,
|
||||
target_body_transcript: dict,
|
||||
) -> None:
|
||||
entity = await self.db.get(TranslatedArticleEntity, article_id)
|
||||
entity.audio_url = audio_url
|
||||
entity.target_body_transcript = target_body_transcript
|
||||
await self.db.commit()
|
||||
|
||||
async def get_by_audio_url(self, audio_url: str) -> TranslatedArticle | None:
|
||||
result = await self.db.execute(
|
||||
select(TranslatedArticleEntity).where(
|
||||
TranslatedArticleEntity.audio_url == audio_url
|
||||
)
|
||||
)
|
||||
entity = result.scalar_one_or_none()
|
||||
return self._to_model(entity) if entity else None
|
||||
|
||||
async def list_complete(self, target_language: str) -> list[TranslatedArticle]:
|
||||
"""Return articles that are fully generated (all content fields set, job succeeded)."""
|
||||
result = await self.db.execute(
|
||||
select(TranslatedArticleEntity)
|
||||
.join(
|
||||
SummariseJobEntity,
|
||||
SummariseJobEntity.translated_article_id == TranslatedArticleEntity.id,
|
||||
)
|
||||
.where(
|
||||
TranslatedArticleEntity.target_language == target_language,
|
||||
SummariseJobEntity.status == "succeeded",
|
||||
TranslatedArticleEntity.target_body.is_not(None),
|
||||
TranslatedArticleEntity.source_body.is_not(None),
|
||||
)
|
||||
.order_by(TranslatedArticleEntity.published_at.desc())
|
||||
)
|
||||
return [self._to_model(e) for e in result.scalars().all()]
|
||||
|
||||
async def get_complete_by_id(self, article_id: uuid.UUID) -> TranslatedArticle | None:
|
||||
"""Return the article only if fully generated (all content fields set, job succeeded)."""
|
||||
result = await self.db.execute(
|
||||
select(TranslatedArticleEntity)
|
||||
.join(
|
||||
SummariseJobEntity,
|
||||
SummariseJobEntity.translated_article_id == TranslatedArticleEntity.id,
|
||||
)
|
||||
.where(
|
||||
TranslatedArticleEntity.id == article_id,
|
||||
SummariseJobEntity.status == "succeeded",
|
||||
TranslatedArticleEntity.target_body.is_not(None),
|
||||
TranslatedArticleEntity.source_body.is_not(None),
|
||||
)
|
||||
)
|
||||
entity = result.scalar_one_or_none()
|
||||
return self._to_model(entity) if entity else None
|
||||
|
|
@ -29,18 +29,31 @@ class SpacyClient:
|
|||
return self._cache[language]
|
||||
|
||||
def get_parts_of_speech(self, text: str, language: str) -> dict:
|
||||
"""Use SpaCy to get parts of speech for the given text and language,
|
||||
broken down by sentences and then by tokens."""
|
||||
nlp = self._get_nlp(language)
|
||||
doc = nlp(text)
|
||||
tokens = [
|
||||
|
||||
sentences = [
|
||||
{
|
||||
"text": token.text,
|
||||
"lemma": token.lemma_,
|
||||
"pos": token.pos_,
|
||||
"tag": token.tag_,
|
||||
"dep": token.dep_,
|
||||
"is_stop": token.is_stop,
|
||||
"text": sent.text,
|
||||
"tokens": [
|
||||
{
|
||||
"text": token.text,
|
||||
"lemma": token.lemma_,
|
||||
"type": token.ent_type_ if token.ent_type_ else None,
|
||||
"pos": token.pos_,
|
||||
"tag": token.tag_,
|
||||
"dep": token.dep_,
|
||||
"is_stop": token.is_stop,
|
||||
"is_punct": token.is_punct,
|
||||
"is_alpha": token.is_alpha,
|
||||
|
||||
}
|
||||
for token in sent
|
||||
if not token.is_space
|
||||
],
|
||||
}
|
||||
for token in doc
|
||||
if not token.is_space
|
||||
for sent in doc.sents
|
||||
]
|
||||
return {"language": language, "tokens": tokens}
|
||||
return {"language": language, "sentences": sentences}
|
||||
|
|
|
|||
|
|
@ -1,62 +1,26 @@
|
|||
import asyncio
|
||||
import random
|
||||
import uuid
|
||||
from functools import partial
|
||||
from typing import Any, Callable, Coroutine
|
||||
|
||||
import anthropic
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from ...languages import SUPPORTED_LANGUAGES, SUPPORTED_LEVELS
|
||||
from ...auth import require_admin
|
||||
from ...storage import upload_audio
|
||||
from ...outbound.postgres.database import get_db, AsyncSessionLocal
|
||||
from ...outbound.postgres.repositories import summarise_job_repository
|
||||
from ...outbound.postgres.repositories.translated_article_repository import TranslatedArticleRepository
|
||||
from ...outbound.anthropic.anthropic_client import AnthropicClient
|
||||
from ...outbound.deepgram.deepgram_client import LocalDeepgramClient
|
||||
from ...outbound.deepl.deepl_client import DeepLClient
|
||||
from ...outbound.gemini.gemini_client import GeminiClient
|
||||
from ...outbound.spacy.spacy_client import SpacyClient
|
||||
from ...domain.services.summarise_service import SummariseService
|
||||
from ...config import settings
|
||||
from ... import worker
|
||||
|
||||
router = APIRouter(prefix="/generate", tags=["api"])
|
||||
|
||||
_ANTHROPIC_RETRYABLE = (
|
||||
anthropic.RateLimitError,
|
||||
anthropic.InternalServerError,
|
||||
anthropic.APITimeoutError,
|
||||
anthropic.APIConnectionError,
|
||||
)
|
||||
_MAX_RETRIES = 4
|
||||
_BASE_DELAY = 1.0 # seconds
|
||||
_MAX_DELAY = 60.0 # seconds
|
||||
|
||||
|
||||
async def _anthropic_with_backoff(
|
||||
coro_fn: Callable[..., Coroutine[Any, Any, Any]],
|
||||
*args: Any,
|
||||
**kwargs: Any,
|
||||
) -> Any:
|
||||
for attempt in range(_MAX_RETRIES + 1):
|
||||
try:
|
||||
return await coro_fn(*args, **kwargs)
|
||||
except _ANTHROPIC_RETRYABLE as exc:
|
||||
if attempt == _MAX_RETRIES:
|
||||
raise
|
||||
retry_after: float | None = None
|
||||
if isinstance(exc, anthropic.RateLimitError):
|
||||
raw = exc.response.headers.get("retry-after")
|
||||
if raw is not None:
|
||||
retry_after = float(raw)
|
||||
if retry_after is None:
|
||||
retry_after = min(_BASE_DELAY * (2 ** attempt), _MAX_DELAY)
|
||||
# ±20 % jitter to spread out concurrent retries
|
||||
jittered = retry_after * (0.8 + random.random() * 0.4)
|
||||
await asyncio.sleep(jittered)
|
||||
|
||||
|
||||
class GenerationRequest(BaseModel):
|
||||
target_language: str
|
||||
|
|
@ -69,56 +33,28 @@ class GenerationResponse(BaseModel):
|
|||
job_id: str
|
||||
|
||||
|
||||
async def _run_generation(job_id: uuid.UUID, request: GenerationRequest) -> None:
|
||||
anthropic_client = AnthropicClient.new(settings.anthropic_api_key)
|
||||
deepgram_client = LocalDeepgramClient(settings.deepgram_api_key)
|
||||
deepl_client = DeepLClient(settings.deepl_api_key)
|
||||
gemini_client = GeminiClient(settings.gemini_api_key)
|
||||
spacy_client = SpacyClient()
|
||||
|
||||
async def _run_generation(
|
||||
job_id: uuid.UUID,
|
||||
article_id: uuid.UUID,
|
||||
request: GenerationRequest,
|
||||
) -> None:
|
||||
service = SummariseService(
|
||||
anthropic_client=AnthropicClient.new(settings.anthropic_api_key),
|
||||
deepgram_client=LocalDeepgramClient(settings.deepgram_api_key),
|
||||
deepl_client=DeepLClient(settings.deepl_api_key),
|
||||
gemini_client=GeminiClient(settings.gemini_api_key),
|
||||
spacy_client=SpacyClient(),
|
||||
)
|
||||
async with AsyncSessionLocal() as db:
|
||||
job = await summarise_job_repository.get_by_id(db, job_id)
|
||||
await summarise_job_repository.mark_processing(db, job)
|
||||
|
||||
try:
|
||||
language_name = SUPPORTED_LANGUAGES[request.target_language]
|
||||
|
||||
source_material = "\n\n".join(request.input_texts[:3])
|
||||
|
||||
generated_text = await _anthropic_with_backoff(
|
||||
anthropic_client.generate_summary_text,
|
||||
content_to_summarise=source_material,
|
||||
complexity_level=request.complexity_level,
|
||||
from_language=language_name,
|
||||
to_language=language_name,
|
||||
length_preference="200-400 words",
|
||||
)
|
||||
|
||||
await summarise_job_repository.save_generated_text(
|
||||
db, job, generated_text, source_material[:500]
|
||||
)
|
||||
|
||||
translated_text = await deepl_client.translate(generated_text, request.source_language)
|
||||
|
||||
await summarise_job_repository.save_translated_text(db, job, translated_text)
|
||||
|
||||
target_pos_data = spacy_client.get_parts_of_speech(generated_text, request.target_language)
|
||||
source_pos_data = spacy_client.get_parts_of_speech(translated_text, request.source_language)
|
||||
|
||||
await summarise_job_repository.save_pos_data(db, job, source_pos_data, target_pos_data)
|
||||
|
||||
voice = gemini_client.get_voice_by_language(request.target_language)
|
||||
wav_bytes = await gemini_client.generate_audio(generated_text, voice)
|
||||
audio_key = f"audio/{job_id}.wav"
|
||||
upload_audio(audio_key, wav_bytes)
|
||||
|
||||
transcript = await deepgram_client.transcribe_bytes(wav_bytes, request.target_language)
|
||||
await summarise_job_repository.save_audio_transcript(db, job, transcript)
|
||||
|
||||
await summarise_job_repository.mark_succeeded(db, job, audio_key)
|
||||
|
||||
except Exception as exc:
|
||||
await summarise_job_repository.mark_failed(db, job, str(exc))
|
||||
await service.run(
|
||||
db=db,
|
||||
job_id=job_id,
|
||||
article_id=article_id,
|
||||
source_language=request.source_language,
|
||||
target_language=request.target_language,
|
||||
complexity_level=request.complexity_level,
|
||||
input_texts=request.input_texts,
|
||||
)
|
||||
|
||||
|
||||
@router.post("", response_model=GenerationResponse, status_code=202)
|
||||
|
|
@ -140,14 +76,18 @@ async def create_generation_job(
|
|||
f"Supported: {sorted(SUPPORTED_LEVELS)}",
|
||||
)
|
||||
|
||||
article = await TranslatedArticleRepository(db).create(
|
||||
source_language=request.source_language,
|
||||
target_language=request.target_language,
|
||||
target_complexities=[request.complexity_level],
|
||||
)
|
||||
|
||||
job = await summarise_job_repository.create(
|
||||
db,
|
||||
user_id=uuid.UUID(token_data["sub"]),
|
||||
source_language=request.source_language,
|
||||
target_language=request.target_language,
|
||||
complexity_level=request.complexity_level,
|
||||
translated_article_id=uuid.UUID(article.id),
|
||||
)
|
||||
|
||||
await worker.enqueue(partial(_run_generation, job.id, request))
|
||||
await worker.enqueue(partial(_run_generation, job.id, uuid.UUID(article.id), request))
|
||||
|
||||
return GenerationResponse(job_id=str(job.id))
|
||||
|
|
|
|||
|
|
@ -1,5 +1,5 @@
|
|||
import uuid
|
||||
from datetime import datetime, timezone
|
||||
from datetime import datetime
|
||||
from functools import partial
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
|
|
@ -9,6 +9,8 @@ from sqlalchemy.ext.asyncio import AsyncSession
|
|||
from ...auth import require_admin
|
||||
from ...outbound.postgres.database import get_db, AsyncSessionLocal
|
||||
from ...outbound.postgres.repositories import summarise_job_repository
|
||||
from ...outbound.postgres.repositories.translated_article_repository import TranslatedArticleRepository
|
||||
from ...outbound.postgres.entities.translated_article_entity import TranslatedArticleEntity
|
||||
from ...outbound.gemini.gemini_client import GeminiClient
|
||||
from ...storage import upload_audio
|
||||
from ...config import settings
|
||||
|
|
@ -20,21 +22,10 @@ router = APIRouter(prefix="/jobs", dependencies=[Depends(require_admin)])
|
|||
class JobResponse(BaseModel):
|
||||
id: uuid.UUID
|
||||
status: str
|
||||
source_language: str
|
||||
target_language: str
|
||||
complexity_level: str
|
||||
translated_article_id: uuid.UUID | None = None
|
||||
created_at: datetime
|
||||
started_at: datetime | None = None
|
||||
completed_at: datetime | None = None
|
||||
# only present on success
|
||||
generated_text: str | None = None
|
||||
generated_text_pos: dict | None = None
|
||||
translated_text: str | None = None
|
||||
translated_text_pos: dict | None = None
|
||||
input_summary: str | None = None
|
||||
audio_url: str | None = None
|
||||
audio_transcript: dict | None = None
|
||||
# only present on failure
|
||||
error_message: str | None = None
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
|
|
@ -45,6 +36,7 @@ class JobSummary(BaseModel):
|
|||
created_at: datetime
|
||||
completed_at: datetime | None = None
|
||||
error_message: str | None = None
|
||||
model_config = {"from_attributes": True}
|
||||
|
||||
|
||||
class JobListResponse(BaseModel):
|
||||
|
|
@ -77,44 +69,37 @@ async def get_job(
|
|||
if job is None:
|
||||
raise HTTPException(status_code=404, detail="Job not found")
|
||||
|
||||
response = JobResponse(
|
||||
id=str(job.id),
|
||||
return JobResponse(
|
||||
id=job.id,
|
||||
status=job.status,
|
||||
source_language=job.source_language,
|
||||
target_language=job.target_language,
|
||||
complexity_level=job.complexity_level,
|
||||
translated_article_id=job.translated_article_id,
|
||||
created_at=job.created_at,
|
||||
started_at=job.started_at,
|
||||
completed_at=job.completed_at,
|
||||
error_message=job.error_message,
|
||||
)
|
||||
|
||||
if job.status == "succeeded":
|
||||
response.generated_text = job.generated_text
|
||||
response.generated_text_pos = job.target_pos_data
|
||||
response.translated_text = job.translated_text
|
||||
response.translated_text_pos = job.source_pos_data
|
||||
response.input_summary = job.input_summary
|
||||
response.audio_url = job.audio_url
|
||||
response.audio_transcript = job.audio_transcript
|
||||
elif job.status == "failed":
|
||||
response.error_message = job.error_message
|
||||
|
||||
return response
|
||||
|
||||
|
||||
async def _run_regenerate_audio(job_id: uuid.UUID) -> None:
|
||||
gemini_client = GeminiClient(settings.gemini_api_key)
|
||||
async with AsyncSessionLocal() as db:
|
||||
job = await summarise_job_repository.get_by_id(db, job_id)
|
||||
article_repo = TranslatedArticleRepository(db)
|
||||
article_entity = await db.get(TranslatedArticleEntity, job.translated_article_id)
|
||||
await summarise_job_repository.mark_processing(db, job)
|
||||
|
||||
try:
|
||||
voice = gemini_client.get_voice_by_language(job.target_language)
|
||||
wav_bytes = await gemini_client.generate_audio(job.generated_text, voice)
|
||||
voice = gemini_client.get_voice_by_language(article_entity.target_language)
|
||||
wav_bytes = await gemini_client.generate_audio(article_entity.target_body, voice)
|
||||
audio_key = f"audio/{job_id}.wav"
|
||||
upload_audio(audio_key, wav_bytes)
|
||||
|
||||
await summarise_job_repository.mark_succeeded(db, job, audio_key)
|
||||
await article_repo.update_audio(
|
||||
article_entity.id,
|
||||
audio_url=audio_key,
|
||||
target_body_transcript=article_entity.target_body_transcript,
|
||||
)
|
||||
await summarise_job_repository.mark_succeeded(db, job)
|
||||
|
||||
except Exception as exc:
|
||||
await summarise_job_repository.mark_failed(db, job, str(exc))
|
||||
|
|
@ -136,19 +121,21 @@ async def regenerate_audio(
|
|||
raise HTTPException(status_code=404, detail="Job not found")
|
||||
|
||||
if str(job.user_id) != token_data["sub"]:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="Not authorized to modify this job")
|
||||
raise HTTPException(status_code=403, detail="Not authorized to modify this job")
|
||||
|
||||
if not job.generated_text:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Job has no generated text to synthesize")
|
||||
if job.translated_article_id is None:
|
||||
raise HTTPException(status_code=400, detail="Job has no associated article")
|
||||
|
||||
if job.audio_url:
|
||||
article_entity = await db.get(TranslatedArticleEntity, job.translated_article_id)
|
||||
|
||||
if not article_entity or not article_entity.target_body:
|
||||
raise HTTPException(status_code=400, detail="Job has no generated text to synthesize")
|
||||
|
||||
if article_entity.audio_url:
|
||||
raise HTTPException(status_code=409, detail="Job already has audio")
|
||||
|
||||
if job.status == "processing":
|
||||
raise HTTPException(
|
||||
status_code=409, detail="Job is already processing")
|
||||
raise HTTPException(status_code=409, detail="Job is already processing")
|
||||
|
||||
await worker.enqueue(partial(_run_regenerate_audio, uid))
|
||||
return {"job_id": job_id}
|
||||
|
|
|
|||
|
|
@ -1,30 +1,104 @@
|
|||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from ...domain.services.article_service import ArticleService
|
||||
from ...outbound.postgres.database import get_db, AsyncSessionLocal
|
||||
from ...outbound.postgres.repositories.summarise_job_repository import PostgresSummariseJobRepository
|
||||
from ...auth import verify_token
|
||||
from ...config import settings
|
||||
from ...outbound.postgres.database import get_db
|
||||
from ...outbound.postgres.repositories.translated_article_repository import TranslatedArticleRepository
|
||||
|
||||
router = APIRouter(prefix="/articles", tags=["bff", "articles"])
|
||||
|
||||
|
||||
router = APIRouter(prefix="/articles", tags=["articles"])
|
||||
|
||||
|
||||
class ArticleResponse(BaseModel):
|
||||
class ArticleItem(BaseModel):
|
||||
id: str
|
||||
published_at: datetime
|
||||
source_language: str
|
||||
source_title: str
|
||||
target_language: str
|
||||
complexity_level: str
|
||||
input_texts: list[str]
|
||||
target_complexities: list[str]
|
||||
target_title: str
|
||||
|
||||
class ArticlesResponse(BaseModel):
|
||||
articles: list[ArticleResponse]
|
||||
|
||||
@router.get("", response_model=ArticlesResponse, status_code=200)
|
||||
async def get_articles(
|
||||
db = Depends(get_db),
|
||||
) -> ArticlesResponse:
|
||||
service = ArticleService(PostgresSummariseJobRepository(db))
|
||||
class ArticleListResponse(BaseModel):
|
||||
articles: list[ArticleItem]
|
||||
|
||||
|
||||
class ArticleDetail(BaseModel):
|
||||
id: str
|
||||
published_at: datetime
|
||||
source_language: str
|
||||
source_title: str
|
||||
source_body: str
|
||||
source_body_pos: dict
|
||||
target_language: str
|
||||
target_complexities: list[str]
|
||||
target_title: str
|
||||
target_body: str
|
||||
target_audio_url: str | None
|
||||
target_body_pos: dict
|
||||
target_body_transcript: dict | None
|
||||
|
||||
|
||||
def _audio_url(key: str | None) -> str | None:
|
||||
if key is None:
|
||||
return None
|
||||
return f"{settings.api_base_url}/media/{key}"
|
||||
|
||||
|
||||
@router.get("", response_model=ArticleListResponse, status_code=200)
|
||||
async def list_articles(
|
||||
target_language: str = 'fr',
|
||||
db: AsyncSession = Depends(get_db),
|
||||
_: dict = Depends(verify_token),
|
||||
) -> ArticleListResponse:
|
||||
articles = await TranslatedArticleRepository(db).list_complete(target_language=target_language)
|
||||
return ArticleListResponse(
|
||||
articles=[
|
||||
ArticleItem(
|
||||
id=a.id,
|
||||
published_at=a.published_at,
|
||||
source_language=a.source_language,
|
||||
source_title=a.source_title,
|
||||
target_language=a.target_language,
|
||||
target_complexities=a.target_complexities,
|
||||
target_title=a.target_title,
|
||||
)
|
||||
for a in articles
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
@router.get("/{article_id}", response_model=ArticleDetail, status_code=200)
|
||||
async def get_article(
|
||||
article_id: str,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
_: dict = Depends(verify_token),
|
||||
) -> ArticleDetail:
|
||||
try:
|
||||
articles = await service.get_all_articles()
|
||||
return ArticlesResponse(articles=articles)
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
uid = uuid.UUID(article_id)
|
||||
except ValueError:
|
||||
raise HTTPException(status_code=400, detail="Invalid article ID")
|
||||
|
||||
article = await TranslatedArticleRepository(db).get_complete_by_id(uid)
|
||||
if article is None:
|
||||
raise HTTPException(status_code=404, detail="Article not found")
|
||||
|
||||
return ArticleDetail(
|
||||
id=article.id,
|
||||
published_at=article.published_at,
|
||||
source_language=article.source_language,
|
||||
source_title=article.source_title,
|
||||
source_body=article.source_body,
|
||||
source_body_pos=article.source_body_pos,
|
||||
target_language=article.target_language,
|
||||
target_complexities=article.target_complexities,
|
||||
target_title=article.target_title,
|
||||
target_body=article.target_body,
|
||||
target_audio_url=_audio_url(article.audio_url),
|
||||
target_body_pos=article.target_body_pos,
|
||||
target_body_transcript=article.target_body_transcript,
|
||||
)
|
||||
|
|
|
|||
|
|
@ -1,13 +1,10 @@
|
|||
import uuid
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from fastapi.responses import Response
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from botocore.exceptions import ClientError
|
||||
|
||||
from ..auth import verify_token
|
||||
from ..outbound.postgres.database import get_db
|
||||
from ..outbound.postgres.repositories.summarise_job_repository import PostgresSummariseJobRepository
|
||||
from ..outbound.postgres.repositories.translated_article_repository import TranslatedArticleRepository
|
||||
from ..storage import download_audio
|
||||
|
||||
router = APIRouter(prefix="/media", tags=["media"])
|
||||
|
|
@ -18,10 +15,9 @@ async def get_media_file(
|
|||
filename: str,
|
||||
db: AsyncSession = Depends(get_db),
|
||||
) -> Response:
|
||||
repository = PostgresSummariseJobRepository(db)
|
||||
job = await repository.get_by_audio_url(filename)
|
||||
article = await TranslatedArticleRepository(db).get_by_audio_url(filename)
|
||||
|
||||
if job is None:
|
||||
if article is None:
|
||||
raise HTTPException(status_code=404, detail="File not found")
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -38,6 +38,8 @@ services:
|
|||
- "${API_PORT:-8000}:8000"
|
||||
environment:
|
||||
DATABASE_URL: postgresql+asyncpg://${POSTGRES_USER:-langlearn}:${POSTGRES_PASSWORD}@db:5432/${POSTGRES_DB:-langlearn}
|
||||
ADMIN_USER_EMAILS: ${ADMIN_USER_EMAILS:-wilson@thomaswilson.xyz}
|
||||
API_BASE_URL: ${API_BASE_URL:-http://localhost:8000}
|
||||
JWT_SECRET: ${JWT_SECRET}
|
||||
ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY}
|
||||
DEEPL_API_KEY: ${DEEPL_API_KEY}
|
||||
|
|
|
|||
|
|
@ -66,7 +66,8 @@
|
|||
--text-headline-sm: 1.25rem;
|
||||
--text-title-lg: 1.125rem;
|
||||
--text-title-md: 1rem;
|
||||
--text-body-lg: 1rem; /* long-form reading standard */
|
||||
--text-body-xl: 1.25rem; /* long-form reading standard */
|
||||
--text-body-lg: 1rem;
|
||||
--text-body-md: 0.9375rem;
|
||||
--text-body-sm: 0.875rem;
|
||||
--text-label-lg: 0.875rem;
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// This file is auto-generated by @hey-api/openapi-ts
|
||||
|
||||
export { analyzePosApiPosPost, createGenerationJobApiGeneratePost, getArticlesBffArticlesGet, getJobApiJobsJobIdGet, getJobsApiJobsGet, getMediaFileMediaFilenameGet, healthHealthGet, loginAuthLoginPost, type Options, regenerateAudioApiJobsJobIdRegenerateAudioPost, registerAuthRegisterPost, translateTextApiTranslateGet } from './sdk.gen';
|
||||
export type { AnalyzePosApiPosPostData, AnalyzePosApiPosPostError, AnalyzePosApiPosPostErrors, AnalyzePosApiPosPostResponse, AnalyzePosApiPosPostResponses, ArticleResponse, ArticlesResponse, ClientOptions, CreateGenerationJobApiGeneratePostData, CreateGenerationJobApiGeneratePostError, CreateGenerationJobApiGeneratePostErrors, CreateGenerationJobApiGeneratePostResponse, CreateGenerationJobApiGeneratePostResponses, GenerationRequest, GenerationResponse, GetArticlesBffArticlesGetData, GetArticlesBffArticlesGetResponse, GetArticlesBffArticlesGetResponses, GetJobApiJobsJobIdGetData, GetJobApiJobsJobIdGetError, GetJobApiJobsJobIdGetErrors, GetJobApiJobsJobIdGetResponse, GetJobApiJobsJobIdGetResponses, GetJobsApiJobsGetData, GetJobsApiJobsGetResponse, GetJobsApiJobsGetResponses, GetMediaFileMediaFilenameGetData, GetMediaFileMediaFilenameGetError, GetMediaFileMediaFilenameGetErrors, GetMediaFileMediaFilenameGetResponses, HealthHealthGetData, HealthHealthGetResponse, HealthHealthGetResponses, HttpValidationError, JobListResponse, JobResponse, JobSummary, LoginAuthLoginPostData, LoginAuthLoginPostError, LoginAuthLoginPostErrors, LoginAuthLoginPostResponse, LoginAuthLoginPostResponses, LoginRequest, PosRequest, PosResponse, RegenerateAudioApiJobsJobIdRegenerateAudioPostData, RegenerateAudioApiJobsJobIdRegenerateAudioPostError, RegenerateAudioApiJobsJobIdRegenerateAudioPostErrors, RegenerateAudioApiJobsJobIdRegenerateAudioPostResponse, RegenerateAudioApiJobsJobIdRegenerateAudioPostResponses, RegisterAuthRegisterPostData, RegisterAuthRegisterPostError, RegisterAuthRegisterPostErrors, RegisterAuthRegisterPostResponses, RegisterRequest, TokenInfo, TokenResponse, TranslateTextApiTranslateGetData, TranslateTextApiTranslateGetError, TranslateTextApiTranslateGetErrors, TranslateTextApiTranslateGetResponse, TranslateTextApiTranslateGetResponses, TranslationResponse, ValidationError } from './types.gen';
|
||||
export { analyzePosApiPosPost, createGenerationJobApiGeneratePost, getArticleBffArticlesArticleIdGet, getJobApiJobsJobIdGet, getJobsApiJobsGet, getMediaFileMediaFilenameGet, getUserProfileBffUserProfileGet, healthHealthGet, listArticlesBffArticlesGet, loginAuthLoginPost, type Options, regenerateAudioApiJobsJobIdRegenerateAudioPost, registerAuthRegisterPost, translateTextApiTranslateGet, upsertLearnableLanguageApiLearnableLanguagesPost } from './sdk.gen';
|
||||
export type { AnalyzePosApiPosPostData, AnalyzePosApiPosPostError, AnalyzePosApiPosPostErrors, AnalyzePosApiPosPostResponse, AnalyzePosApiPosPostResponses, ArticleDetail, ArticleItem, ArticleListResponse, ClientOptions, CreateGenerationJobApiGeneratePostData, CreateGenerationJobApiGeneratePostError, CreateGenerationJobApiGeneratePostErrors, CreateGenerationJobApiGeneratePostResponse, CreateGenerationJobApiGeneratePostResponses, GenerationRequest, GenerationResponse, GetArticleBffArticlesArticleIdGetData, GetArticleBffArticlesArticleIdGetError, GetArticleBffArticlesArticleIdGetErrors, GetArticleBffArticlesArticleIdGetResponse, GetArticleBffArticlesArticleIdGetResponses, GetJobApiJobsJobIdGetData, GetJobApiJobsJobIdGetError, GetJobApiJobsJobIdGetErrors, GetJobApiJobsJobIdGetResponse, GetJobApiJobsJobIdGetResponses, GetJobsApiJobsGetData, GetJobsApiJobsGetResponse, GetJobsApiJobsGetResponses, GetMediaFileMediaFilenameGetData, GetMediaFileMediaFilenameGetError, GetMediaFileMediaFilenameGetErrors, GetMediaFileMediaFilenameGetResponses, GetUserProfileBffUserProfileGetData, GetUserProfileBffUserProfileGetResponse, GetUserProfileBffUserProfileGetResponses, HealthHealthGetData, HealthHealthGetResponse, HealthHealthGetResponses, HttpValidationError, JobListResponse, JobResponse, JobSummary, LearnableLanguageItem, LearnableLanguageRequest, LearnableLanguageResponse, ListArticlesBffArticlesGetData, ListArticlesBffArticlesGetError, ListArticlesBffArticlesGetErrors, ListArticlesBffArticlesGetResponse, ListArticlesBffArticlesGetResponses, LoginAuthLoginPostData, LoginAuthLoginPostError, LoginAuthLoginPostErrors, LoginAuthLoginPostResponse, LoginAuthLoginPostResponses, LoginRequest, PosRequest, PosResponse, RegenerateAudioApiJobsJobIdRegenerateAudioPostData, RegenerateAudioApiJobsJobIdRegenerateAudioPostError, RegenerateAudioApiJobsJobIdRegenerateAudioPostErrors, RegenerateAudioApiJobsJobIdRegenerateAudioPostResponse, RegenerateAudioApiJobsJobIdRegenerateAudioPostResponses, 
RegisterAuthRegisterPostData, RegisterAuthRegisterPostError, RegisterAuthRegisterPostErrors, RegisterAuthRegisterPostResponses, RegisterRequest, TokenInfo, TokenResponse, TranslateTextApiTranslateGetData, TranslateTextApiTranslateGetError, TranslateTextApiTranslateGetErrors, TranslateTextApiTranslateGetResponse, TranslateTextApiTranslateGetResponses, TranslationResponse, UpsertLearnableLanguageApiLearnableLanguagesPostData, UpsertLearnableLanguageApiLearnableLanguagesPostError, UpsertLearnableLanguageApiLearnableLanguagesPostErrors, UpsertLearnableLanguageApiLearnableLanguagesPostResponse, UpsertLearnableLanguageApiLearnableLanguagesPostResponses, UserProfileResponse, ValidationError } from './types.gen';
|
||||
|
|
|
|||
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
import type { Client, Options as Options2, TDataShape } from './client';
|
||||
import { client } from './client.gen';
|
||||
import type { AnalyzePosApiPosPostData, AnalyzePosApiPosPostErrors, AnalyzePosApiPosPostResponses, CreateGenerationJobApiGeneratePostData, CreateGenerationJobApiGeneratePostErrors, CreateGenerationJobApiGeneratePostResponses, GetArticlesBffArticlesGetData, GetArticlesBffArticlesGetResponses, GetJobApiJobsJobIdGetData, GetJobApiJobsJobIdGetErrors, GetJobApiJobsJobIdGetResponses, GetJobsApiJobsGetData, GetJobsApiJobsGetResponses, GetMediaFileMediaFilenameGetData, GetMediaFileMediaFilenameGetErrors, GetMediaFileMediaFilenameGetResponses, HealthHealthGetData, HealthHealthGetResponses, LoginAuthLoginPostData, LoginAuthLoginPostErrors, LoginAuthLoginPostResponses, RegenerateAudioApiJobsJobIdRegenerateAudioPostData, RegenerateAudioApiJobsJobIdRegenerateAudioPostErrors, RegenerateAudioApiJobsJobIdRegenerateAudioPostResponses, RegisterAuthRegisterPostData, RegisterAuthRegisterPostErrors, RegisterAuthRegisterPostResponses, TranslateTextApiTranslateGetData, TranslateTextApiTranslateGetErrors, TranslateTextApiTranslateGetResponses } from './types.gen';
|
||||
import type { AnalyzePosApiPosPostData, AnalyzePosApiPosPostErrors, AnalyzePosApiPosPostResponses, CreateGenerationJobApiGeneratePostData, CreateGenerationJobApiGeneratePostErrors, CreateGenerationJobApiGeneratePostResponses, GetArticleBffArticlesArticleIdGetData, GetArticleBffArticlesArticleIdGetErrors, GetArticleBffArticlesArticleIdGetResponses, GetJobApiJobsJobIdGetData, GetJobApiJobsJobIdGetErrors, GetJobApiJobsJobIdGetResponses, GetJobsApiJobsGetData, GetJobsApiJobsGetResponses, GetMediaFileMediaFilenameGetData, GetMediaFileMediaFilenameGetErrors, GetMediaFileMediaFilenameGetResponses, GetUserProfileBffUserProfileGetData, GetUserProfileBffUserProfileGetResponses, HealthHealthGetData, HealthHealthGetResponses, ListArticlesBffArticlesGetData, ListArticlesBffArticlesGetErrors, ListArticlesBffArticlesGetResponses, LoginAuthLoginPostData, LoginAuthLoginPostErrors, LoginAuthLoginPostResponses, RegenerateAudioApiJobsJobIdRegenerateAudioPostData, RegenerateAudioApiJobsJobIdRegenerateAudioPostErrors, RegenerateAudioApiJobsJobIdRegenerateAudioPostResponses, RegisterAuthRegisterPostData, RegisterAuthRegisterPostErrors, RegisterAuthRegisterPostResponses, TranslateTextApiTranslateGetData, TranslateTextApiTranslateGetErrors, TranslateTextApiTranslateGetResponses, UpsertLearnableLanguageApiLearnableLanguagesPostData, UpsertLearnableLanguageApiLearnableLanguagesPostErrors, UpsertLearnableLanguageApiLearnableLanguagesPostResponses } from './types.gen';
|
||||
|
||||
export type Options<TData extends TDataShape = TDataShape, ThrowOnError extends boolean = boolean> = Options2<TData, ThrowOnError> & {
|
||||
/**
|
||||
|
|
@ -81,9 +81,44 @@ export const regenerateAudioApiJobsJobIdRegenerateAudioPost = <ThrowOnError exte
|
|||
});
|
||||
|
||||
/**
|
||||
* Get Articles
|
||||
* Upsert Learnable Language
|
||||
*/
|
||||
export const getArticlesBffArticlesGet = <ThrowOnError extends boolean = false>(options?: Options<GetArticlesBffArticlesGetData, ThrowOnError>) => (options?.client ?? client).get<GetArticlesBffArticlesGetResponses, unknown, ThrowOnError>({ url: '/bff/articles', ...options });
|
||||
export const upsertLearnableLanguageApiLearnableLanguagesPost = <ThrowOnError extends boolean = false>(options: Options<UpsertLearnableLanguageApiLearnableLanguagesPostData, ThrowOnError>) => (options.client ?? client).post<UpsertLearnableLanguageApiLearnableLanguagesPostResponses, UpsertLearnableLanguageApiLearnableLanguagesPostErrors, ThrowOnError>({
|
||||
security: [{ scheme: 'bearer', type: 'http' }],
|
||||
url: '/api/learnable_languages',
|
||||
...options,
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
...options.headers
|
||||
}
|
||||
});
|
||||
|
||||
/**
|
||||
* List Articles
|
||||
*/
|
||||
export const listArticlesBffArticlesGet = <ThrowOnError extends boolean = false>(options?: Options<ListArticlesBffArticlesGetData, ThrowOnError>) => (options?.client ?? client).get<ListArticlesBffArticlesGetResponses, ListArticlesBffArticlesGetErrors, ThrowOnError>({
|
||||
security: [{ scheme: 'bearer', type: 'http' }],
|
||||
url: '/bff/articles',
|
||||
...options
|
||||
});
|
||||
|
||||
/**
|
||||
* Get Article
|
||||
*/
|
||||
export const getArticleBffArticlesArticleIdGet = <ThrowOnError extends boolean = false>(options: Options<GetArticleBffArticlesArticleIdGetData, ThrowOnError>) => (options.client ?? client).get<GetArticleBffArticlesArticleIdGetResponses, GetArticleBffArticlesArticleIdGetErrors, ThrowOnError>({
|
||||
security: [{ scheme: 'bearer', type: 'http' }],
|
||||
url: '/bff/articles/{article_id}',
|
||||
...options
|
||||
});
|
||||
|
||||
/**
|
||||
* Get User Profile
|
||||
*/
|
||||
export const getUserProfileBffUserProfileGet = <ThrowOnError extends boolean = false>(options?: Options<GetUserProfileBffUserProfileGetData, ThrowOnError>) => (options?.client ?? client).get<GetUserProfileBffUserProfileGetResponses, unknown, ThrowOnError>({
|
||||
security: [{ scheme: 'bearer', type: 'http' }],
|
||||
url: '/bff/user_profile',
|
||||
...options
|
||||
});
|
||||
|
||||
/**
|
||||
* Register
|
||||
|
|
|
|||
|
|
@ -5,31 +5,105 @@ export type ClientOptions = {
|
|||
};
|
||||
|
||||
/**
|
||||
* ArticleResponse
|
||||
* ArticleDetail
|
||||
*/
|
||||
export type ArticleResponse = {
|
||||
export type ArticleDetail = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* Published At
|
||||
*/
|
||||
published_at: string;
|
||||
/**
|
||||
* Source Language
|
||||
*/
|
||||
source_language: string;
|
||||
/**
|
||||
* Source Title
|
||||
*/
|
||||
source_title: string;
|
||||
/**
|
||||
* Source Body
|
||||
*/
|
||||
source_body: string;
|
||||
/**
|
||||
* Target Language
|
||||
*/
|
||||
target_language: string;
|
||||
/**
|
||||
* Complexity Level
|
||||
* Target Complexities
|
||||
*/
|
||||
complexity_level: string;
|
||||
target_complexities: Array<string>;
|
||||
/**
|
||||
* Input Texts
|
||||
* Target Title
|
||||
*/
|
||||
input_texts: Array<string>;
|
||||
target_title: string;
|
||||
/**
|
||||
* Target Body
|
||||
*/
|
||||
target_body: string;
|
||||
/**
|
||||
* Target Audio Url
|
||||
*/
|
||||
target_audio_url: string | null;
|
||||
/**
|
||||
* Target Body Pos
|
||||
*/
|
||||
target_body_pos: {
|
||||
[key: string]: unknown;
|
||||
} | null;
|
||||
/**
|
||||
* Target Body Transcript
|
||||
*/
|
||||
target_body_transcript: {
|
||||
[key: string]: unknown;
|
||||
} | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* ArticlesResponse
|
||||
* ArticleItem
|
||||
*/
|
||||
export type ArticlesResponse = {
|
||||
export type ArticleItem = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* Published At
|
||||
*/
|
||||
published_at: string;
|
||||
/**
|
||||
* Source Language
|
||||
*/
|
||||
source_language: string;
|
||||
/**
|
||||
* Source Title
|
||||
*/
|
||||
source_title: string;
|
||||
/**
|
||||
* Target Language
|
||||
*/
|
||||
target_language: string;
|
||||
/**
|
||||
* Target Complexities
|
||||
*/
|
||||
target_complexities: Array<string>;
|
||||
/**
|
||||
* Target Title
|
||||
*/
|
||||
target_title: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* ArticleListResponse
|
||||
*/
|
||||
export type ArticleListResponse = {
|
||||
/**
|
||||
* Articles
|
||||
*/
|
||||
articles: Array<ArticleResponse>;
|
||||
articles: Array<ArticleItem>;
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
@ -186,6 +260,68 @@ export type JobSummary = {
|
|||
error_message?: string | null;
|
||||
};
|
||||
|
||||
/**
|
||||
* LearnableLanguageItem
|
||||
*/
|
||||
export type LearnableLanguageItem = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* Source Language
|
||||
*/
|
||||
source_language: string;
|
||||
/**
|
||||
* Target Language
|
||||
*/
|
||||
target_language: string;
|
||||
/**
|
||||
* Proficiencies
|
||||
*/
|
||||
proficiencies: Array<string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* LearnableLanguageRequest
|
||||
*/
|
||||
export type LearnableLanguageRequest = {
|
||||
/**
|
||||
* Source Language
|
||||
*/
|
||||
source_language: string;
|
||||
/**
|
||||
* Target Language
|
||||
*/
|
||||
target_language: string;
|
||||
/**
|
||||
* Proficiencies
|
||||
*/
|
||||
proficiencies: Array<string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* LearnableLanguageResponse
|
||||
*/
|
||||
export type LearnableLanguageResponse = {
|
||||
/**
|
||||
* Id
|
||||
*/
|
||||
id: string;
|
||||
/**
|
||||
* Source Language
|
||||
*/
|
||||
source_language: string;
|
||||
/**
|
||||
* Target Language
|
||||
*/
|
||||
target_language: string;
|
||||
/**
|
||||
* Proficiencies
|
||||
*/
|
||||
proficiencies: Array<string>;
|
||||
};
|
||||
|
||||
/**
|
||||
* LoginRequest
|
||||
*/
|
||||
|
|
@ -304,6 +440,16 @@ export type TranslationResponse = {
|
|||
translated_text: string;
|
||||
};
|
||||
|
||||
/**
|
||||
* UserProfileResponse
|
||||
*/
|
||||
export type UserProfileResponse = {
|
||||
/**
|
||||
* Learnable Languages
|
||||
*/
|
||||
learnable_languages: Array<LearnableLanguageItem>;
|
||||
};
|
||||
|
||||
/**
|
||||
* ValidationError
|
||||
*/
|
||||
|
|
@ -500,21 +646,106 @@ export type RegenerateAudioApiJobsJobIdRegenerateAudioPostResponses = {
|
|||
|
||||
export type RegenerateAudioApiJobsJobIdRegenerateAudioPostResponse = RegenerateAudioApiJobsJobIdRegenerateAudioPostResponses[keyof RegenerateAudioApiJobsJobIdRegenerateAudioPostResponses];
|
||||
|
||||
export type GetArticlesBffArticlesGetData = {
|
||||
body?: never;
|
||||
export type UpsertLearnableLanguageApiLearnableLanguagesPostData = {
|
||||
body: LearnableLanguageRequest;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/bff/articles';
|
||||
url: '/api/learnable_languages';
|
||||
};
|
||||
|
||||
export type GetArticlesBffArticlesGetResponses = {
|
||||
export type UpsertLearnableLanguageApiLearnableLanguagesPostErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type UpsertLearnableLanguageApiLearnableLanguagesPostError = UpsertLearnableLanguageApiLearnableLanguagesPostErrors[keyof UpsertLearnableLanguageApiLearnableLanguagesPostErrors];
|
||||
|
||||
export type UpsertLearnableLanguageApiLearnableLanguagesPostResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: ArticlesResponse;
|
||||
200: LearnableLanguageResponse;
|
||||
};
|
||||
|
||||
export type GetArticlesBffArticlesGetResponse = GetArticlesBffArticlesGetResponses[keyof GetArticlesBffArticlesGetResponses];
|
||||
export type UpsertLearnableLanguageApiLearnableLanguagesPostResponse = UpsertLearnableLanguageApiLearnableLanguagesPostResponses[keyof UpsertLearnableLanguageApiLearnableLanguagesPostResponses];
|
||||
|
||||
export type ListArticlesBffArticlesGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: {
|
||||
/**
|
||||
* Target Language
|
||||
*/
|
||||
target_language?: string;
|
||||
};
|
||||
url: '/bff/articles';
|
||||
};
|
||||
|
||||
export type ListArticlesBffArticlesGetErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type ListArticlesBffArticlesGetError = ListArticlesBffArticlesGetErrors[keyof ListArticlesBffArticlesGetErrors];
|
||||
|
||||
export type ListArticlesBffArticlesGetResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: ArticleListResponse;
|
||||
};
|
||||
|
||||
export type ListArticlesBffArticlesGetResponse = ListArticlesBffArticlesGetResponses[keyof ListArticlesBffArticlesGetResponses];
|
||||
|
||||
export type GetArticleBffArticlesArticleIdGetData = {
|
||||
body?: never;
|
||||
path: {
|
||||
/**
|
||||
* Article Id
|
||||
*/
|
||||
article_id: string;
|
||||
};
|
||||
query?: never;
|
||||
url: '/bff/articles/{article_id}';
|
||||
};
|
||||
|
||||
export type GetArticleBffArticlesArticleIdGetErrors = {
|
||||
/**
|
||||
* Validation Error
|
||||
*/
|
||||
422: HttpValidationError;
|
||||
};
|
||||
|
||||
export type GetArticleBffArticlesArticleIdGetError = GetArticleBffArticlesArticleIdGetErrors[keyof GetArticleBffArticlesArticleIdGetErrors];
|
||||
|
||||
export type GetArticleBffArticlesArticleIdGetResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: ArticleDetail;
|
||||
};
|
||||
|
||||
export type GetArticleBffArticlesArticleIdGetResponse = GetArticleBffArticlesArticleIdGetResponses[keyof GetArticleBffArticlesArticleIdGetResponses];
|
||||
|
||||
export type GetUserProfileBffUserProfileGetData = {
|
||||
body?: never;
|
||||
path?: never;
|
||||
query?: never;
|
||||
url: '/bff/user_profile';
|
||||
};
|
||||
|
||||
export type GetUserProfileBffUserProfileGetResponses = {
|
||||
/**
|
||||
* Successful Response
|
||||
*/
|
||||
200: UserProfileResponse;
|
||||
};
|
||||
|
||||
export type GetUserProfileBffUserProfileGetResponse = GetUserProfileBffUserProfileGetResponses[keyof GetUserProfileBffUserProfileGetResponses];
|
||||
|
||||
export type RegisterAuthRegisterPostData = {
|
||||
body: RegisterRequest;
|
||||
|
|
|
|||
140
frontend/src/lib/components/TopNav.svelte
Normal file
140
frontend/src/lib/components/TopNav.svelte
Normal file
|
|
@ -0,0 +1,140 @@
|
|||
<script lang="ts">
|
||||
import { page } from '$app/state';
|
||||
|
||||
const isActive = (prefix: string) => page.url.pathname.startsWith(prefix);
|
||||
</script>
|
||||
|
||||
<header class="topnav" role="banner">
|
||||
<div class="topnav-inner">
|
||||
<a href="/app" class="wordmark" aria-label="Home">
|
||||
<span class="wordmark-text">Language Learning App</span>
|
||||
</a>
|
||||
|
||||
<nav aria-label="Main navigation">
|
||||
<ul class="nav-links" role="list">
|
||||
<li>
|
||||
<a
|
||||
href="/app/articles"
|
||||
class="nav-link"
|
||||
class:is-active={isActive('/app/articles')}
|
||||
aria-current={isActive('/app/articles') ? 'page' : undefined}
|
||||
>
|
||||
Articles
|
||||
</a>
|
||||
</li>
|
||||
<li>
|
||||
<a
|
||||
href="/app/profile"
|
||||
class="nav-link"
|
||||
class:is-active={isActive('/app/profile')}
|
||||
aria-current={isActive('/app/profile') ? 'page' : undefined}
|
||||
>
|
||||
Profile
|
||||
</a>
|
||||
</li>
|
||||
</ul>
|
||||
</nav>
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<style>
|
||||
.topnav {
|
||||
position: sticky;
|
||||
top: 0;
|
||||
z-index: 100;
|
||||
background-color: var(--glass-bg);
|
||||
backdrop-filter: blur(var(--glass-blur));
|
||||
-webkit-backdrop-filter: blur(var(--glass-blur));
|
||||
box-shadow: 0 1px 0 color-mix(in srgb, var(--color-outline-variant) 35%, transparent);
|
||||
}
|
||||
|
||||
.topnav-inner {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: var(--space-6);
|
||||
max-width: 82rem;
|
||||
margin: 0 auto;
|
||||
padding: 0 var(--space-6);
|
||||
height: 3.25rem;
|
||||
}
|
||||
|
||||
/* --- Wordmark --- */
|
||||
|
||||
.wordmark {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--space-2);
|
||||
text-decoration: none;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.mark {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: 1.625rem;
|
||||
height: 1.625rem;
|
||||
border-radius: 50%;
|
||||
background-color: var(--color-primary);
|
||||
color: var(--color-on-primary);
|
||||
font-family: var(--font-display);
|
||||
font-size: 0.75rem;
|
||||
font-weight: var(--weight-bold);
|
||||
letter-spacing: 0;
|
||||
flex-shrink: 0;
|
||||
}
|
||||
|
||||
.wordmark-text {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--text-body-md);
|
||||
font-weight: var(--weight-semibold);
|
||||
color: var(--color-on-surface);
|
||||
letter-spacing: var(--tracking-wide);
|
||||
}
|
||||
|
||||
/* --- Nav links --- */
|
||||
|
||||
.nav-links {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--space-1);
|
||||
list-style: none;
|
||||
}
|
||||
|
||||
.nav-link {
|
||||
display: inline-block;
|
||||
padding: var(--space-1) var(--space-3);
|
||||
font-family: var(--font-label);
|
||||
font-size: var(--text-label-lg);
|
||||
font-weight: var(--weight-medium);
|
||||
letter-spacing: var(--tracking-wide);
|
||||
color: var(--color-on-surface-variant);
|
||||
text-decoration: none;
|
||||
border-radius: var(--radius-md);
|
||||
transition:
|
||||
color var(--duration-fast) var(--ease-standard),
|
||||
background-color var(--duration-fast) var(--ease-standard);
|
||||
}
|
||||
|
||||
.nav-link:hover {
|
||||
color: var(--color-on-surface);
|
||||
background-color: color-mix(in srgb, var(--color-on-surface) 5%, transparent);
|
||||
}
|
||||
|
||||
.nav-link.is-active {
|
||||
color: var(--color-primary);
|
||||
background-color: color-mix(in srgb, var(--color-primary) 8%, transparent);
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
.topnav-inner {
|
||||
padding: 0 var(--space-4);
|
||||
gap: var(--space-3);
|
||||
}
|
||||
|
||||
.wordmark-text {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
File diff suppressed because one or more lines are too long
9
frontend/src/routes/app/+layout.svelte
Normal file
9
frontend/src/routes/app/+layout.svelte
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
<script lang="ts">
|
||||
import TopNav from '$lib/components/TopNav.svelte';
|
||||
|
||||
const { children } = $props();
|
||||
</script>
|
||||
|
||||
<TopNav />
|
||||
|
||||
{@render children()}
|
||||
|
|
@ -1,6 +1,172 @@
|
|||
<h1>App</h1>
|
||||
<script lang="ts">
|
||||
const hour = new Date().getHours();
|
||||
const greeting = hour < 12 ? 'Good morning' : hour < 17 ? 'Good afternoon' : 'Good evening';
|
||||
</script>
|
||||
|
||||
<menu>
|
||||
<li><a href="/app/generate/summary">Generate Summary Job</a></li>
|
||||
<li><a href="/app/jobs">Jobs</a></li>
|
||||
</menu>
|
||||
<div class="page">
|
||||
<div class="hero">
|
||||
<p class="eyebrow label-md">Dashboard</p>
|
||||
<h1 class="hero-heading">{greeting}.</h1>
|
||||
<p class="hero-sub">What will you learn today?</p>
|
||||
</div>
|
||||
|
||||
<div class="card-grid">
|
||||
<a href="/app/articles" class="card card--primary">
|
||||
<div class="card-kicker label-md">Read</div>
|
||||
<h2 class="card-title">Articles</h2>
|
||||
<p class="card-body">Browse your reading library and practice with word-by-word translations.</p>
|
||||
<span class="card-cta" aria-hidden="true">Open library →</span>
|
||||
</a>
|
||||
|
||||
<a href="/app/generate/summary" class="card">
|
||||
<div class="card-kicker label-md">Create</div>
|
||||
<h2 class="card-title">New article</h2>
|
||||
<p class="card-body">Generate a new reading from any text in the language you're learning.</p>
|
||||
<span class="card-cta" aria-hidden="true">Get started →</span>
|
||||
</a>
|
||||
|
||||
<a href="/app/jobs" class="card">
|
||||
<div class="card-kicker label-md">History</div>
|
||||
<h2 class="card-title">Jobs</h2>
|
||||
<p class="card-body">Review the status of your generation jobs and access completed content.</p>
|
||||
<span class="card-cta" aria-hidden="true">View jobs →</span>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.page {
|
||||
max-width: 60rem;
|
||||
margin: 0 auto;
|
||||
padding: var(--space-12) var(--space-6) var(--space-8);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-10);
|
||||
}
|
||||
|
||||
/* --- Hero --- */
|
||||
|
||||
.hero {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
}
|
||||
|
||||
.eyebrow {
|
||||
color: var(--color-on-surface-variant);
|
||||
}
|
||||
|
||||
.hero-heading {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--text-display-md);
|
||||
font-weight: var(--weight-bold);
|
||||
line-height: var(--leading-tight);
|
||||
letter-spacing: var(--tracking-tight);
|
||||
color: var(--color-on-surface);
|
||||
}
|
||||
|
||||
.hero-sub {
|
||||
font-family: var(--font-body);
|
||||
font-size: var(--text-body-lg);
|
||||
color: var(--color-on-surface-variant);
|
||||
margin-top: var(--space-1);
|
||||
}
|
||||
|
||||
/* --- Card grid --- */
|
||||
|
||||
.card-grid {
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fill, minmax(16rem, 1fr));
|
||||
gap: var(--space-4);
|
||||
}
|
||||
|
||||
/* --- Card --- */
|
||||
|
||||
.card {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
padding: var(--space-6);
|
||||
background-color: var(--color-surface-container-low);
|
||||
border-radius: var(--radius-xl);
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
transition: background-color var(--duration-fast) var(--ease-standard);
|
||||
}
|
||||
|
||||
.card:hover {
|
||||
background-color: var(--color-surface-container);
|
||||
}
|
||||
|
||||
.card--primary {
|
||||
background-color: var(--color-primary-container);
|
||||
}
|
||||
|
||||
.card--primary:hover {
|
||||
background-color: color-mix(in srgb, var(--color-primary-container) 80%, var(--color-primary));
|
||||
}
|
||||
|
||||
.card-kicker {
|
||||
color: var(--color-on-surface-variant);
|
||||
margin-bottom: var(--space-1);
|
||||
}
|
||||
|
||||
.card--primary .card-kicker {
|
||||
color: var(--color-on-primary-container);
|
||||
opacity: 0.75;
|
||||
}
|
||||
|
||||
.card-title {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--text-headline-md);
|
||||
font-weight: var(--weight-semibold);
|
||||
line-height: var(--leading-snug);
|
||||
color: var(--color-on-surface);
|
||||
}
|
||||
|
||||
.card--primary .card-title {
|
||||
color: var(--color-on-primary-container);
|
||||
}
|
||||
|
||||
.card-body {
|
||||
font-family: var(--font-body);
|
||||
font-size: var(--text-body-sm);
|
||||
color: var(--color-on-surface-variant);
|
||||
line-height: var(--leading-relaxed);
|
||||
flex: 1;
|
||||
}
|
||||
|
||||
.card--primary .card-body {
|
||||
color: var(--color-on-primary-container);
|
||||
opacity: 0.8;
|
||||
}
|
||||
|
||||
.card-cta {
|
||||
font-family: var(--font-label);
|
||||
font-size: var(--text-label-lg);
|
||||
font-weight: var(--weight-medium);
|
||||
color: var(--color-primary);
|
||||
margin-top: var(--space-2);
|
||||
}
|
||||
|
||||
.card--primary .card-cta {
|
||||
color: var(--color-on-primary-container);
|
||||
}
|
||||
|
||||
/* --- Responsive --- */
|
||||
|
||||
@media (max-width: 640px) {
|
||||
.page {
|
||||
padding: var(--space-8) var(--space-4) var(--space-6);
|
||||
gap: var(--space-8);
|
||||
}
|
||||
|
||||
.hero-heading {
|
||||
font-size: var(--text-headline-lg);
|
||||
}
|
||||
|
||||
.card-grid {
|
||||
grid-template-columns: 1fr;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
|
|||
18
frontend/src/routes/app/articles/+page.server.ts
Normal file
18
frontend/src/routes/app/articles/+page.server.ts
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
import type { PageServerLoad } from './$types';
|
||||
import { listArticlesBffArticlesGet } from '../../../client/sdk.gen.ts';
|
||||
|
||||
export const load: PageServerLoad = async ({ locals }) => {
|
||||
const { data, response } = await listArticlesBffArticlesGet({
|
||||
headers: { Authorization: `Bearer ${locals.authToken ?? ''}` }
|
||||
});
|
||||
|
||||
if (!data || response.status !== 200) {
|
||||
return { articles: [] };
|
||||
}
|
||||
|
||||
const articles = [...data.articles].sort(
|
||||
(a, b) => new Date(b.published_at).getTime() - new Date(a.published_at).getTime()
|
||||
);
|
||||
|
||||
return { articles };
|
||||
};
|
||||
175
frontend/src/routes/app/articles/+page.svelte
Normal file
175
frontend/src/routes/app/articles/+page.svelte
Normal file
|
|
@ -0,0 +1,175 @@
|
|||
<script lang="ts">
|
||||
import type { PageProps } from './$types';
|
||||
|
||||
const { data }: PageProps = $props();
|
||||
|
||||
const languageNames: Record<string, string> = {
|
||||
en: 'English',
|
||||
fr: 'French',
|
||||
es: 'Spanish',
|
||||
it: 'Italian',
|
||||
de: 'German',
|
||||
pt: 'Portuguese',
|
||||
ja: 'Japanese',
|
||||
zh: 'Chinese',
|
||||
ko: 'Korean'
|
||||
};
|
||||
|
||||
const lang = (code: string) => languageNames[code] ?? code.toUpperCase();
|
||||
|
||||
const fmt = (iso: string) =>
|
||||
new Intl.DateTimeFormat('en-GB', {
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric'
|
||||
}).format(new Date(iso));
|
||||
</script>
|
||||
|
||||
<div class="page">
|
||||
<header class="page-header">
|
||||
<p class="form-eyebrow">Reading</p>
|
||||
<h1 class="form-title">Articles</h1>
|
||||
</header>
|
||||
|
||||
{#if data.articles.length > 0}
|
||||
<ul class="article-list" role="list">
|
||||
{#each data.articles as article}
|
||||
<li class="article-item">
|
||||
<a href="/app/articles/{article.id}" class="article-link">
|
||||
<div class="article-meta">
|
||||
<span class="label-md meta-lang">{lang(article.target_language)}</span>
|
||||
{#each article.target_complexities as level}
|
||||
<span class="complexity-tag label-md">{level}</span>
|
||||
{/each}
|
||||
</div>
|
||||
<h2 class="article-title">{article.target_title}</h2>
|
||||
<p class="article-source">{article.source_title}</p>
|
||||
<time class="article-date label-md" datetime={article.published_at}>{fmt(article.published_at)}</time>
|
||||
</a>
|
||||
</li>
|
||||
{/each}
|
||||
</ul>
|
||||
{:else}
|
||||
<div class="empty-state">
|
||||
<p class="empty-heading">No articles yet</p>
|
||||
<p class="empty-body">Generate content to see articles here.</p>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.page {
|
||||
max-width: 52rem;
|
||||
margin: 0 auto;
|
||||
padding: var(--space-8) var(--space-6);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-6);
|
||||
}
|
||||
|
||||
/* --- Article list --- */
|
||||
|
||||
.article-list {
|
||||
list-style: none;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-3);
|
||||
}
|
||||
|
||||
.article-item {
|
||||
background-color: var(--color-surface-container-low);
|
||||
border-radius: var(--radius-lg);
|
||||
transition: background-color var(--duration-fast) var(--ease-standard);
|
||||
}
|
||||
|
||||
.article-item:hover {
|
||||
background-color: var(--color-surface-container);
|
||||
}
|
||||
|
||||
.article-link {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
padding: var(--space-5) var(--space-6);
|
||||
text-decoration: none;
|
||||
color: inherit;
|
||||
}
|
||||
|
||||
/* --- Article metadata --- */
|
||||
|
||||
.article-meta {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--space-2);
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
.meta-lang {
|
||||
color: var(--color-primary);
|
||||
}
|
||||
|
||||
.complexity-tag {
|
||||
color: var(--color-on-surface-variant);
|
||||
background-color: var(--color-surface-container-high);
|
||||
padding: 0.15em 0.5em;
|
||||
border-radius: var(--radius-full);
|
||||
}
|
||||
|
||||
/* --- Article content --- */
|
||||
|
||||
.article-title {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--text-headline-md);
|
||||
font-weight: var(--weight-semibold);
|
||||
line-height: var(--leading-snug);
|
||||
color: var(--color-on-surface);
|
||||
}
|
||||
|
||||
.article-source {
|
||||
font-family: var(--font-body);
|
||||
font-size: var(--text-body-sm);
|
||||
color: var(--color-on-surface-variant);
|
||||
font-style: italic;
|
||||
}
|
||||
|
||||
.article-date {
|
||||
color: var(--color-on-surface-variant);
|
||||
margin-top: var(--space-1);
|
||||
}
|
||||
|
||||
/* --- Empty state --- */
|
||||
|
||||
.empty-state {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
align-items: center;
|
||||
gap: var(--space-3);
|
||||
padding: var(--space-10) var(--space-6);
|
||||
text-align: center;
|
||||
background-color: var(--color-surface-container-low);
|
||||
border-radius: var(--radius-xl);
|
||||
}
|
||||
|
||||
.empty-heading {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--text-title-lg);
|
||||
font-weight: var(--weight-semibold);
|
||||
color: var(--color-on-surface);
|
||||
}
|
||||
|
||||
.empty-body {
|
||||
font-size: var(--text-body-md);
|
||||
color: var(--color-on-surface-variant);
|
||||
max-width: 20rem;
|
||||
}
|
||||
|
||||
@media (max-width: 640px) {
|
||||
.page {
|
||||
padding: var(--space-6) var(--space-4);
|
||||
}
|
||||
|
||||
.article-link {
|
||||
padding: var(--space-4);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
import { error } from '@sveltejs/kit';
|
||||
import type { PageServerLoad } from './$types';
|
||||
import { getArticleBffArticlesArticleIdGet } from '../../../../client/sdk.gen.ts';
|
||||
import { PUBLIC_API_BASE_URL } from '$env/static/public';
|
||||
|
||||
export const load: PageServerLoad = async ({ params, locals }) => {
|
||||
const { data, response } = await getArticleBffArticlesArticleIdGet({
|
||||
headers: { Authorization: `Bearer ${locals.authToken ?? ''}` },
|
||||
path: { article_id: params.article_id }
|
||||
});
|
||||
|
||||
if (!data || response.status !== 200) {
|
||||
error(response.status === 404 ? 404 : 500, 'Article not found');
|
||||
}
|
||||
|
||||
const audioUrl = data.target_audio_url
|
||||
? `${PUBLIC_API_BASE_URL}/media/${data.target_audio_url}`
|
||||
: null;
|
||||
|
||||
return { article: data, audioUrl };
|
||||
};
|
||||
578
frontend/src/routes/app/articles/[article_id]/+page.svelte
Normal file
578
frontend/src/routes/app/articles/[article_id]/+page.svelte
Normal file
|
|
@ -0,0 +1,578 @@
|
|||
<script lang="ts">
|
||||
import type { PageProps } from './$types';
|
||||
|
||||
const { data }: PageProps = $props();
|
||||
const { article, audioUrl } = data;
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Body parsing: split into paragraphs → sentences → tokens
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
type WordToken = { type: 'word'; text: string; wordIdx: number };
|
||||
type OtherToken = { type: 'other'; text: string };
|
||||
type Token = WordToken | OtherToken;
|
||||
|
||||
type Sentence = {
|
||||
tokens: Token[];
|
||||
idx: number; // global sentence index
|
||||
startWordIdx: number;
|
||||
endWordIdx: number;
|
||||
};
|
||||
|
||||
type Paragraph = { sentences: Sentence[] };
|
||||
|
||||
function parseBody(text: string): { paragraphs: Paragraph[]; totalWords: number } {
|
||||
const paragraphs: Paragraph[] = [];
|
||||
let wordIdx = 0;
|
||||
let sentenceIdx = 0;
|
||||
|
||||
for (const paraText of text.split(/\n\n+/)) {
|
||||
if (!paraText.trim()) continue;
|
||||
|
||||
// Split into alternating word / non-word tokens
|
||||
const rawTokens = paraText.match(/[\p{L}\p{N}\u2019'''-]+|[^\p{L}\p{N}\u2019'''-]+/gu) ?? [];
|
||||
|
||||
const sentences: Sentence[] = [];
|
||||
let currentTokens: Token[] = [];
|
||||
let startWordIdx = wordIdx;
|
||||
let hasWord = false;
|
||||
|
||||
for (const raw of rawTokens) {
|
||||
if (/[\p{L}\p{N}]/u.test(raw)) {
|
||||
currentTokens.push({ type: 'word', text: raw, wordIdx: wordIdx++ });
|
||||
hasWord = true;
|
||||
} else {
|
||||
currentTokens.push({ type: 'other', text: raw });
|
||||
// Flush sentence on sentence-ending punctuation
|
||||
if (hasWord && /[.!?]/.test(raw)) {
|
||||
sentences.push({
|
||||
tokens: [...currentTokens],
|
||||
idx: sentenceIdx++,
|
||||
startWordIdx,
|
||||
endWordIdx: wordIdx - 1
|
||||
});
|
||||
currentTokens = [];
|
||||
startWordIdx = wordIdx;
|
||||
hasWord = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (currentTokens.length > 0) {
|
||||
sentences.push({
|
||||
tokens: currentTokens,
|
||||
idx: sentenceIdx++,
|
||||
startWordIdx,
|
||||
endWordIdx: wordIdx - 1
|
||||
});
|
||||
}
|
||||
|
||||
if (sentences.length > 0) {
|
||||
paragraphs.push({ sentences });
|
||||
}
|
||||
}
|
||||
|
||||
return { paragraphs, totalWords: wordIdx };
|
||||
}
|
||||
|
||||
const { paragraphs } = parseBody(article.target_body);
|
||||
|
||||
// Flat sentence list for O(n) audio-time lookup
|
||||
const allSentences: Array<{ idx: number; startWordIdx: number; endWordIdx: number }> = [];
|
||||
for (const para of paragraphs) {
|
||||
for (const s of para.sentences) {
|
||||
allSentences.push({ idx: s.idx, startWordIdx: s.startWordIdx, endWordIdx: s.endWordIdx });
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Transcript: extract per-word timings from Deepgram response
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
type WordTiming = { start: number; end: number };
|
||||
|
||||
function extractWordTimings(transcript: Record<string, unknown> | null): WordTiming[] {
|
||||
if (!transcript) return [];
|
||||
try {
|
||||
const words = (transcript as any)?.results?.channels?.[0]?.alternatives?.[0]?.words;
|
||||
if (!Array.isArray(words)) return [];
|
||||
return words.map((w: any) => ({ start: Number(w.start), end: Number(w.end) }));
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
const wordTimings = extractWordTimings(article.target_body_transcript);
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Reactive state
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
let audioEl: HTMLAudioElement | null = $state(null);
|
||||
let activeSentenceIdx = $state(-1);
|
||||
let selectedWord: WordToken | null = $state(null);
|
||||
let translatedText: string | null = $state(null);
|
||||
let translating = $state(false);
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Audio: sentence highlighting
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
function handleTimeUpdate() {
|
||||
if (!audioEl || wordTimings.length === 0) return;
|
||||
const t = audioEl.currentTime;
|
||||
|
||||
// Find the word index at current playback time
|
||||
let wordIdx = -1;
|
||||
for (let i = 0; i < wordTimings.length; i++) {
|
||||
if (wordTimings[i].start <= t && t <= wordTimings[i].end) {
|
||||
wordIdx = i;
|
||||
break;
|
||||
}
|
||||
// Between words: use the most recently started word
|
||||
if (wordTimings[i].start > t) {
|
||||
wordIdx = i - 1;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (wordIdx < 0) return;
|
||||
|
||||
for (const s of allSentences) {
|
||||
if (s.startWordIdx <= wordIdx && wordIdx <= s.endWordIdx) {
|
||||
activeSentenceIdx = s.idx;
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Word click: fetch translation
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
async function handleWordClick(token: WordToken) {
|
||||
selectedWord = token;
|
||||
translatedText = null;
|
||||
translating = true;
|
||||
|
||||
try {
|
||||
const params = new URLSearchParams({
|
||||
text: token.text,
|
||||
target_language: article.source_language
|
||||
});
|
||||
const res = await fetch(`/app/translate?${params}`);
|
||||
if (res.ok) {
|
||||
const body = await res.json();
|
||||
translatedText = body.translated_text ?? null;
|
||||
}
|
||||
} catch {
|
||||
translatedText = null;
|
||||
} finally {
|
||||
translating = false;
|
||||
}
|
||||
}
|
||||
|
||||
function closePanel() {
|
||||
selectedWord = null;
|
||||
translatedText = null;
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Display helpers
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
const languageNames: Record<string, string> = {
|
||||
en: 'English',
|
||||
fr: 'French',
|
||||
es: 'Spanish',
|
||||
it: 'Italian',
|
||||
de: 'German',
|
||||
pt: 'Portuguese',
|
||||
ja: 'Japanese',
|
||||
zh: 'Chinese',
|
||||
ko: 'Korean'
|
||||
};
|
||||
|
||||
const targetLang =
|
||||
languageNames[article.target_language] ?? article.target_language.toUpperCase();
|
||||
|
||||
const publishedDate = new Intl.DateTimeFormat('en-GB', {
|
||||
year: 'numeric',
|
||||
month: 'long',
|
||||
day: 'numeric'
|
||||
}).format(new Date(article.published_at));
|
||||
</script>
|
||||
|
||||
<!-- Reading progress bar (CSS scroll-driven animation) -->
|
||||
<div class="progress-bar" aria-hidden="true"></div>
|
||||
|
||||
<div class="page">
|
||||
<nav class="breadcrumb">
|
||||
<a href="/app/articles" class="link">← Articles</a>
|
||||
</nav>
|
||||
|
||||
<header class="article-header">
|
||||
<p class="article-eyebrow label-md">{targetLang} · {publishedDate}</p>
|
||||
<h1 class="article-title">{article.target_title}</h1>
|
||||
</header>
|
||||
|
||||
<div class="article-layout">
|
||||
<!-- Main content: audio + body -->
|
||||
<div class="article-main">
|
||||
{#if audioUrl}
|
||||
<div class="audio-section">
|
||||
<audio
|
||||
bind:this={audioEl}
|
||||
src={audioUrl}
|
||||
controls
|
||||
ontimeupdate={handleTimeUpdate}
|
||||
class="audio-player"
|
||||
>
|
||||
Your browser does not support the audio element.
|
||||
</audio>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
<div class="article-body" lang={article.target_language}>
|
||||
{#each paragraphs as para}
|
||||
<p class="paragraph">
|
||||
{#each para.sentences as sentence}<span
|
||||
class="sentence"
|
||||
class:sentence--active={activeSentenceIdx === sentence.idx}
|
||||
>{#each sentence.tokens as token}{#if token.type === 'word'}<button
|
||||
class="word"
|
||||
class:word--selected={selectedWord?.wordIdx === token.wordIdx}
|
||||
onclick={() => handleWordClick(token)}>{token.text}</button
|
||||
>{:else}{token.text}{/if}{/each}</span
|
||||
>{/each}
|
||||
</p>
|
||||
{/each}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Translation panel (desktop: sticky sidebar; mobile: bottom drawer) -->
|
||||
<aside
|
||||
class="translation-panel"
|
||||
class:is-open={selectedWord !== null}
|
||||
aria-label="Word translation"
|
||||
>
|
||||
{#if selectedWord}
|
||||
<div class="panel-header">
|
||||
<p class="panel-word">{selectedWord.text}</p>
|
||||
<button class="btn btn-ghost panel-close" onclick={closePanel} aria-label="Close panel">
|
||||
✕
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{#if translating}
|
||||
<div class="panel-loading">
|
||||
<div class="spinner" aria-hidden="true"></div>
|
||||
<span>Translating…</span>
|
||||
</div>
|
||||
{:else if translatedText}
|
||||
<p class="panel-translation">{translatedText}</p>
|
||||
<button class="btn btn-secondary panel-save" disabled aria-disabled="true">
|
||||
Add to flashcard
|
||||
</button>
|
||||
{:else}
|
||||
<p class="panel-error">Could not load translation.</p>
|
||||
{/if}
|
||||
{:else}
|
||||
<p class="panel-hint">Tap any word for a translation</p>
|
||||
{/if}
|
||||
</aside>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Mobile backdrop: closes the drawer when tapped outside -->
|
||||
<!-- svelte-ignore a11y_click_events_have_key_events a11y_no_static_element_interactions -->
|
||||
<div
|
||||
class="drawer-backdrop"
|
||||
class:is-visible={selectedWord !== null}
|
||||
onclick={closePanel}
|
||||
aria-hidden="true"
|
||||
></div>
|
||||
|
||||
<style>
|
||||
/* --- Reading progress bar (CSS scroll-driven animation) --- */
|
||||
/* Sits at the bottom edge of the sticky topnav (3.25rem) */
|
||||
.progress-bar {
|
||||
position: fixed;
|
||||
top: 3.25rem;
|
||||
left: 0;
|
||||
right: 0;
|
||||
height: 2px;
|
||||
background: var(--color-primary);
|
||||
transform-origin: left;
|
||||
transform: scaleX(0);
|
||||
animation: reading-progress linear both;
|
||||
animation-timeline: scroll(root);
|
||||
z-index: 99;
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
@keyframes reading-progress {
|
||||
from {
|
||||
transform: scaleX(0);
|
||||
}
|
||||
to {
|
||||
transform: scaleX(1);
|
||||
}
|
||||
}
|
||||
|
||||
/* --- Page shell --- */
|
||||
.page {
|
||||
padding: var(--space-8) var(--space-6);
|
||||
max-width: 82rem;
|
||||
margin: 0 auto;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-5);
|
||||
}
|
||||
|
||||
/* --- Breadcrumb --- */
|
||||
.breadcrumb {
|
||||
font-family: var(--font-label);
|
||||
font-size: var(--text-label-lg);
|
||||
}
|
||||
|
||||
/* --- Header --- */
|
||||
.article-header {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-2);
|
||||
padding-top: var(--space-2);
|
||||
}
|
||||
|
||||
.article-eyebrow {
|
||||
color: var(--color-on-surface-variant);
|
||||
}
|
||||
|
||||
.article-title {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--text-display-md);
|
||||
font-weight: var(--weight-bold);
|
||||
line-height: var(--leading-tight);
|
||||
letter-spacing: var(--tracking-tight);
|
||||
color: var(--color-on-surface);
|
||||
max-width: 38rem;
|
||||
}
|
||||
|
||||
/* --- Two-column layout --- */
|
||||
.article-layout {
|
||||
display: grid;
|
||||
grid-template-columns: 1fr;
|
||||
gap: var(--space-6);
|
||||
align-items: start;
|
||||
}
|
||||
|
||||
@media (min-width: 768px) {
|
||||
.article-layout {
|
||||
grid-template-columns: 1fr 22rem;
|
||||
}
|
||||
}
|
||||
|
||||
/* --- Audio --- */
|
||||
.audio-section {
|
||||
margin-bottom: var(--space-5);
|
||||
}
|
||||
|
||||
.audio-player {
|
||||
width: 100%;
|
||||
accent-color: var(--color-primary);
|
||||
}
|
||||
|
||||
/* --- Article body --- */
|
||||
.article-body {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-4);
|
||||
}
|
||||
|
||||
.paragraph {
|
||||
font-family: var(--font-body);
|
||||
font-size: var(--text-body-xl);
|
||||
line-height: 2;
|
||||
color: var(--color-on-surface);
|
||||
}
|
||||
|
||||
/* Sentence: highlighted when audio is at that point */
|
||||
.sentence {
|
||||
border-radius: var(--radius-xs);
|
||||
transition: background-color var(--duration-normal) var(--ease-standard);
|
||||
}
|
||||
|
||||
.sentence--active {
|
||||
background-color: var(--color-primary-container);
|
||||
}
|
||||
|
||||
/* --- Word buttons --- */
|
||||
.word {
|
||||
display: inline;
|
||||
background: none;
|
||||
border: none;
|
||||
padding: 0 0.05em;
|
||||
margin: 0;
|
||||
font: inherit;
|
||||
color: inherit;
|
||||
cursor: pointer;
|
||||
border-radius: var(--radius-xs);
|
||||
transition:
|
||||
background-color var(--duration-fast) var(--ease-standard),
|
||||
color var(--duration-fast) var(--ease-standard);
|
||||
}
|
||||
|
||||
.word:hover {
|
||||
background-color: color-mix(in srgb, var(--color-primary) 12%, transparent);
|
||||
color: var(--color-primary);
|
||||
}
|
||||
|
||||
.word--selected {
|
||||
background-color: color-mix(in srgb, var(--color-primary) 20%, transparent);
|
||||
color: var(--color-primary);
|
||||
font-weight: var(--weight-medium);
|
||||
}
|
||||
|
||||
/* --- Translation panel: Desktop (sticky sidebar) --- */
|
||||
@media (min-width: 768px) {
|
||||
.translation-panel {
|
||||
position: sticky;
|
||||
top: var(--space-6);
|
||||
background-color: var(--color-surface-container-lowest);
|
||||
border-radius: var(--radius-xl);
|
||||
padding: var(--space-5);
|
||||
min-height: 16rem;
|
||||
box-shadow: var(--shadow-tonal-sm);
|
||||
}
|
||||
|
||||
.drawer-backdrop {
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
/* --- Translation panel: Mobile (bottom drawer) --- */
|
||||
@media (max-width: 767px) {
|
||||
.translation-panel {
|
||||
position: fixed;
|
||||
bottom: 0;
|
||||
left: 0;
|
||||
right: 0;
|
||||
z-index: 300;
|
||||
background-color: var(--color-surface-container-lowest);
|
||||
border-radius: var(--radius-xl) var(--radius-xl) 0 0;
|
||||
padding: var(--space-5) var(--space-5) calc(var(--space-5) + env(safe-area-inset-bottom));
|
||||
max-height: 55vh;
|
||||
overflow-y: auto;
|
||||
transform: translateY(100%);
|
||||
transition: transform var(--duration-slow) var(--ease-standard);
|
||||
box-shadow: 0 -8px 32px color-mix(in srgb, var(--color-on-surface) 8%, transparent);
|
||||
}
|
||||
|
||||
.translation-panel.is-open {
|
||||
transform: translateY(0);
|
||||
}
|
||||
|
||||
.drawer-backdrop {
|
||||
position: fixed;
|
||||
inset: 0;
|
||||
z-index: 200;
|
||||
background: color-mix(in srgb, var(--color-on-surface) 20%, transparent);
|
||||
opacity: 0;
|
||||
pointer-events: none;
|
||||
transition: opacity var(--duration-slow) var(--ease-standard);
|
||||
}
|
||||
|
||||
.drawer-backdrop.is-visible {
|
||||
opacity: 1;
|
||||
pointer-events: auto;
|
||||
}
|
||||
}
|
||||
|
||||
/* --- Panel internals --- */
|
||||
.panel-header {
|
||||
display: flex;
|
||||
align-items: flex-start;
|
||||
justify-content: space-between;
|
||||
gap: var(--space-2);
|
||||
margin-bottom: var(--space-4);
|
||||
}
|
||||
|
||||
.panel-word {
|
||||
font-family: var(--font-display);
|
||||
font-size: var(--text-headline-md);
|
||||
font-weight: var(--weight-semibold);
|
||||
line-height: var(--leading-snug);
|
||||
color: var(--color-on-surface);
|
||||
}
|
||||
|
||||
.panel-close {
|
||||
flex-shrink: 0;
|
||||
color: var(--color-on-surface-variant);
|
||||
font-size: var(--text-body-lg);
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.panel-translation {
|
||||
font-family: var(--font-body);
|
||||
font-size: var(--text-body-lg);
|
||||
line-height: var(--leading-relaxed);
|
||||
color: var(--color-on-surface-variant);
|
||||
font-style: italic;
|
||||
margin-bottom: var(--space-4);
|
||||
}
|
||||
|
||||
.panel-save {
|
||||
width: 100%;
|
||||
padding-block: var(--space-2);
|
||||
opacity: 0.6;
|
||||
}
|
||||
|
||||
.panel-hint {
|
||||
font-family: var(--font-label);
|
||||
font-size: var(--text-body-sm);
|
||||
color: var(--color-on-surface-variant);
|
||||
text-align: center;
|
||||
padding: var(--space-4) 0;
|
||||
}
|
||||
|
||||
.panel-error {
|
||||
font-family: var(--font-label);
|
||||
font-size: var(--text-body-sm);
|
||||
color: var(--color-on-surface-variant);
|
||||
}
|
||||
|
||||
.panel-loading {
|
||||
display: flex;
|
||||
align-items: center;
|
||||
gap: var(--space-3);
|
||||
font-family: var(--font-label);
|
||||
font-size: var(--text-body-sm);
|
||||
color: var(--color-on-surface-variant);
|
||||
}
|
||||
|
||||
.spinner {
|
||||
flex-shrink: 0;
|
||||
width: 1rem;
|
||||
height: 1rem;
|
||||
border: 2px solid var(--color-outline-variant);
|
||||
border-top-color: var(--color-primary);
|
||||
border-radius: 50%;
|
||||
animation: spin 0.9s linear infinite;
|
||||
}
|
||||
|
||||
@keyframes spin {
|
||||
to {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
/* --- Responsive --- */
|
||||
@media (max-width: 640px) {
|
||||
.page {
|
||||
padding: var(--space-6) var(--space-4);
|
||||
}
|
||||
|
||||
.article-title {
|
||||
font-size: var(--text-headline-lg);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
|
@ -119,7 +119,7 @@
|
|||
{#if job.audio_transcript}
|
||||
<section class="content-section">
|
||||
<h2 class="section-title">Audio Transcript</h2>
|
||||
<div class="pos-text">{JSON.stringify(JSON.parse(job.audio_transcript as any), null, 2)}</div>
|
||||
<div class="pos-text">{JSON.stringify(job.audio_transcript, null, 2)}</div>
|
||||
</section>
|
||||
{/if}
|
||||
|
||||
|
|
@ -136,7 +136,6 @@
|
|||
<div class="pos-text">{JSON.stringify(job.translated_text_pos, null, 2)}</div>
|
||||
</section>
|
||||
{/if}
|
||||
|
||||
</div>
|
||||
|
||||
<style>
|
||||
|
|
@ -316,7 +315,6 @@
|
|||
overflow-y: scroll;
|
||||
max-height: 300px;
|
||||
white-space: pre-wrap;
|
||||
|
||||
}
|
||||
|
||||
/* --- Audio --- */
|
||||
|
|
|
|||
27
frontend/src/routes/app/profile/+page.svelte
Normal file
27
frontend/src/routes/app/profile/+page.svelte
Normal file
|
|
@ -0,0 +1,27 @@
|
|||
<div class="page">
|
||||
<header>
|
||||
<p class="form-eyebrow">Account</p>
|
||||
<h1 class="form-title">Profile</h1>
|
||||
</header>
|
||||
|
||||
<p class="coming-soon">Profile settings coming soon.</p>
|
||||
|
||||
<a href="/logout" class="btn btn-ghost">Sign out</a>
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.page {
|
||||
max-width: 40rem;
|
||||
margin: 0 auto;
|
||||
padding: var(--space-8) var(--space-6);
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: var(--space-5);
|
||||
}
|
||||
|
||||
.coming-soon {
|
||||
font-family: var(--font-body);
|
||||
font-size: var(--text-body-md);
|
||||
color: var(--color-on-surface-variant);
|
||||
}
|
||||
</style>
|
||||
20
frontend/src/routes/app/translate/+server.ts
Normal file
20
frontend/src/routes/app/translate/+server.ts
Normal file
|
|
@ -0,0 +1,20 @@
|
|||
import { json } from '@sveltejs/kit';
|
||||
import type { RequestHandler } from './$types';
|
||||
import { translateTextApiTranslateGet } from '../../../client/sdk.gen.ts';
|
||||
|
||||
export const GET: RequestHandler = async ({ url, locals }) => {
|
||||
const text = url.searchParams.get('text') ?? '';
|
||||
const targetLanguage = url.searchParams.get('target_language') ?? 'en';
|
||||
const context = url.searchParams.get('context') ?? undefined;
|
||||
|
||||
const { data, response } = await translateTextApiTranslateGet({
|
||||
headers: { Authorization: `Bearer ${locals.authToken ?? ''}` },
|
||||
query: { text, target_language: targetLanguage, context }
|
||||
});
|
||||
|
||||
if (!data || response.status !== 200) {
|
||||
return json({ error: 'Translation failed' }, { status: 500 });
|
||||
}
|
||||
|
||||
return json(data);
|
||||
};
|
||||
|
|
@ -31,7 +31,7 @@ export const actions = {
|
|||
expires: new Date(Date.now() + 7 * 24 * 60 * 60 * 1000) // 7 days
|
||||
});
|
||||
|
||||
return redirect(307, '/');
|
||||
return { success: true };
|
||||
}
|
||||
|
||||
return { success: response.status === 200, error: response.status !== 200 ? data : null };
|
||||
|
|
|
|||
|
|
@ -1,5 +1,14 @@
|
|||
<script lang="ts">
|
||||
import type { PageProps } from './$types';
|
||||
import { onMount } from 'svelte';
|
||||
import type { PageData, PageProps } from './$types';
|
||||
import { goto } from '$app/navigation';
|
||||
const { form }: PageProps = $props();
|
||||
|
||||
onMount(() => {
|
||||
if (form?.success) {
|
||||
goto('/app');
|
||||
}
|
||||
});
|
||||
</script>
|
||||
|
||||
<div class="page">
|
||||
|
|
|
|||
Loading…
Reference in a new issue