feat: Add overlay_json field for Quip text/sticker decorations

- Go: Add overlay_json to Post model, CreatePost handler, and all post queries
- Flutter: Rename QuipTextOverlay → QuipOverlayItem with type enum (text/sticker)
- QuipOverlayItem: add id field, content replaces text, backward-compat alias
- quip_video_item: parse overlay_json and render non-interactive overlays in feed
- quip_upload_provider: accept overlayJson param and pass to publishPost
- api_service: add overlayJson param to publish
This commit is contained in:
Patrick Britton 2026-02-17 20:06:53 -06:00
parent d26e63ca1b
commit 5b5e89e383
19 changed files with 1217 additions and 66 deletions

View file

@ -515,6 +515,7 @@ func (h *PostHandler) CreatePost(c *gin.Context) {
IsNSFW bool `json:"is_nsfw"`
NSFWReason string `json:"nsfw_reason"`
Visibility string `json:"visibility"`
OverlayJSON *string `json:"overlay_json"`
}
if err := c.ShouldBindJSON(&req); err != nil {
@ -616,6 +617,7 @@ func (h *PostHandler) CreatePost(c *gin.Context) {
NSFWReason: req.NSFWReason,
Lat: req.BeaconLat,
Long: req.BeaconLong,
OverlayJSON: req.OverlayJSON,
}
if req.CategoryID != nil {

View file

@ -39,6 +39,9 @@ type Post struct {
NSFWReason string `json:"nsfw_reason" db:"nsfw_reason"`
ExpiresAt *time.Time `json:"expires_at" db:"expires_at"`
// Quip overlay JSON — stores text/sticker decorations as client-rendered widgets
OverlayJSON *string `json:"overlay_json,omitempty" db:"overlay_json"`
// Link preview (populated via enrichment, not in every query)
LinkPreviewURL *string `json:"link_preview_url,omitempty" db:"link_preview_url"`
LinkPreviewTitle *string `json:"link_preview_title,omitempty" db:"link_preview_title"`

View file

@ -59,7 +59,7 @@ func (r *PostRepository) CreatePost(ctx context.Context, post *models.Post) erro
is_beacon, beacon_type, location, confidence_score,
is_active_beacon, allow_chain, chain_parent_id, visibility, expires_at,
is_nsfw, nsfw_reason,
severity, incident_status, radius
severity, incident_status, radius, overlay_json
) VALUES (
$1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13,
$14, $15,
@ -68,7 +68,7 @@ func (r *PostRepository) CreatePost(ctx context.Context, post *models.Post) erro
ELSE NULL END,
$18, $19, $20, $21, $22, $23,
$24, $25,
$26, $27, $28
$26, $27, $28, $29
) RETURNING id, created_at
`
@ -84,7 +84,7 @@ func (r *PostRepository) CreatePost(ctx context.Context, post *models.Post) erro
post.IsBeacon, post.BeaconType, post.Lat, post.Long, post.Confidence,
post.IsActiveBeacon, post.AllowChain, post.ChainParentID, post.Visibility, post.ExpiresAt,
post.IsNSFW, post.NSFWReason,
post.Severity, post.IncidentStatus, post.Radius,
post.Severity, post.IncidentStatus, post.Radius, post.OverlayJSON,
).Scan(&post.ID, &post.CreatedAt)
if err != nil {
@ -168,7 +168,8 @@ func (r *PostRepository) GetFeed(ctx context.Context, userID string, categorySlu
CASE WHEN ($4::text) != '' THEN COALESCE((SELECT jsonb_agg(emoji) FROM public.post_reactions WHERE post_id = p.id AND user_id = $4::text::uuid), '[]'::jsonb) ELSE '[]'::jsonb END as my_reactions,
COALESCE(p.is_nsfw, FALSE) as is_nsfw,
COALESCE(p.nsfw_reason, '') as nsfw_reason,
p.link_preview_url, p.link_preview_title, p.link_preview_description, p.link_preview_image_url, p.link_preview_site_name
p.link_preview_url, p.link_preview_title, p.link_preview_description, p.link_preview_image_url, p.link_preview_site_name,
p.overlay_json
FROM public.posts p
JOIN public.profiles pr ON p.author_id = pr.id
LEFT JOIN public.post_metrics m ON p.id = m.post_id
@ -220,6 +221,7 @@ func (r *PostRepository) GetFeed(ctx context.Context, userID string, categorySlu
&p.AllowChain, &p.Visibility, &p.Reactions, &p.MyReactions,
&p.IsNSFW, &p.NSFWReason,
&p.LinkPreviewURL, &p.LinkPreviewTitle, &p.LinkPreviewDescription, &p.LinkPreviewImageURL, &p.LinkPreviewSiteName,
&p.OverlayJSON,
)
if err != nil {
return nil, err
@ -358,7 +360,8 @@ func (r *PostRepository) GetPostByID(ctx context.Context, postID string, userID
p.allow_chain, p.visibility,
COALESCE(p.is_nsfw, FALSE) as is_nsfw,
COALESCE(p.nsfw_reason, '') as nsfw_reason,
p.link_preview_url, p.link_preview_title, p.link_preview_description, p.link_preview_image_url, p.link_preview_site_name
p.link_preview_url, p.link_preview_title, p.link_preview_description, p.link_preview_image_url, p.link_preview_site_name,
p.overlay_json
FROM public.posts p
JOIN public.profiles pr ON p.author_id = pr.id
LEFT JOIN public.post_metrics m ON p.id = m.post_id
@ -383,6 +386,7 @@ func (r *PostRepository) GetPostByID(ctx context.Context, postID string, userID
&p.AllowChain, &p.Visibility,
&p.IsNSFW, &p.NSFWReason,
&p.LinkPreviewURL, &p.LinkPreviewTitle, &p.LinkPreviewDescription, &p.LinkPreviewImageURL, &p.LinkPreviewSiteName,
&p.OverlayJSON,
)
if err != nil {
return nil, err

View file

@ -1,30 +1,43 @@
import 'package:flutter/material.dart';
/// Model for text overlays on Quip videos
class QuipTextOverlay {
final String text;
final Color color;
final Offset position; // Normalized 0.0-1.0 coordinates
final double scale;
final double rotation; // In radians
/// Type of overlay item on a Quip video.
enum QuipOverlayType { text, sticker }
const QuipTextOverlay({
required this.text,
required this.color,
required this.position,
/// A single overlay item (text or sticker/emoji) placed on a Quip video.
/// Position is normalized (0.0–1.0) relative to the video dimensions so it
/// renders correctly at any screen size.
class QuipOverlayItem {
final String id; // unique identifier for widget keying
final QuipOverlayType type;
final String content; // text string or emoji/sticker character
final Color color; // text color (default white)
final Offset position; // normalized 0.0–1.0
final double scale;
final double rotation; // radians
const QuipOverlayItem({
required this.id,
required this.type,
required this.content,
this.color = Colors.white,
this.position = const Offset(0.5, 0.5),
this.scale = 1.0,
this.rotation = 0.0,
});
QuipTextOverlay copyWith({
String? text,
QuipOverlayItem copyWith({
String? id,
QuipOverlayType? type,
String? content,
Color? color,
Offset? position,
double? scale,
double? rotation,
}) {
return QuipTextOverlay(
text: text ?? this.text,
return QuipOverlayItem(
id: id ?? this.id,
type: type ?? this.type,
content: content ?? this.content,
color: color ?? this.color,
position: position ?? this.position,
scale: scale ?? this.scale,
@ -34,7 +47,9 @@ class QuipTextOverlay {
Map<String, dynamic> toJson() {
return {
'text': text,
'id': id,
'type': type.name,
'content': content,
'color': color.value,
'position': {'x': position.dx, 'y': position.dy},
'scale': scale,
@ -42,9 +57,13 @@ class QuipTextOverlay {
};
}
factory QuipTextOverlay.fromJson(Map<String, dynamic> json) {
return QuipTextOverlay(
text: json['text'] as String,
factory QuipOverlayItem.fromJson(Map<String, dynamic> json) {
return QuipOverlayItem(
id: json['id'] as String? ?? UniqueKey().toString(),
type: QuipOverlayType.values.byName(
(json['type'] as String?) ?? 'text',
),
content: (json['content'] ?? json['text'] ?? '') as String,
color: Color(json['color'] as int),
position: Offset(
(json['position']['x'] as num).toDouble(),
@ -56,7 +75,11 @@ class QuipTextOverlay {
}
}
/// Placeholder for future music track functionality
/// Backward-compat alias so existing screens that reference QuipTextOverlay
/// do not require immediate migration.
typedef QuipTextOverlay = QuipOverlayItem;
/// Placeholder for music track metadata.
class MusicTrack {
final String id;
final String name;

View file

@ -42,7 +42,12 @@ class QuipUploadNotifier extends Notifier<QuipUploadState> {
return QuipUploadState(isUploading: false, progress: 0.0);
}
Future<void> startUpload(File videoFile, String caption, {double? thumbnailTimestampMs}) async {
Future<void> startUpload(
File videoFile,
String caption, {
double? thumbnailTimestampMs,
String? overlayJson,
}) async {
try {
state = state.copyWith(
isUploading: true, progress: 0.0, error: null, successMessage: null);
@ -105,10 +110,11 @@ class QuipUploadNotifier extends Notifier<QuipUploadState> {
// Publish post via Go API
await ApiService.instance.publishPost(
body: caption,
body: caption.isNotEmpty ? caption : ' ',
videoUrl: videoUrl,
thumbnailUrl: thumbnailUrl,
categoryId: null, // Default
overlayJson: overlayJson,
);
// Trigger feed refresh

View file

@ -171,7 +171,7 @@ class BeaconScreenState extends ConsumerState<BeaconScreen> with TickerProviderS
if (!_locationPermissionGranted) return;
setState(() => _isLoadingLocation = true);
try {
final position = await Geolocator.getCurrentPosition(desiredAccuracy: LocationAccuracy.high);
final position = await Geolocator.getCurrentPosition(desiredAccuracy: LocationAccuracy.low);
if (mounted) {
setState(() {
_userLocation = LatLng(position.latitude, position.longitude);

View file

@ -17,6 +17,8 @@ import 'package:shared_preferences/shared_preferences.dart';
import '../../widgets/radial_menu_overlay.dart';
import '../../widgets/onboarding_modal.dart';
import '../../widgets/offline_indicator.dart';
import '../../widgets/neighborhood/neighborhood_picker_sheet.dart';
import '../../services/api_service.dart';
import '../../providers/quip_upload_provider.dart';
import '../../providers/notification_provider.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
@ -61,7 +63,10 @@ class _HomeShellState extends ConsumerState<HomeShell> with WidgetsBindingObserv
_initNotificationListener();
_loadNavTapCounts();
WidgetsBinding.instance.addPostFrameCallback((_) {
if (mounted) OnboardingModal.showIfNeeded(context);
if (mounted) {
OnboardingModal.showIfNeeded(context);
_checkNeighborhoodOnboarding();
}
});
}
@ -87,6 +92,23 @@ class _HomeShellState extends ConsumerState<HomeShell> with WidgetsBindingObserv
}
}
/// Best-effort check that routes a not-yet-onboarded user into the
/// neighborhood picker. Any network/API failure is swallowed: this flow is
/// non-critical and must never block app startup.
Future<void> _checkNeighborhoodOnboarding() async {
  try {
    final neighborhood = await ApiService.instance.getMyNeighborhood();
    if (neighborhood == null) return;
    final hasOnboarded = neighborhood['onboarded'] as bool? ?? false;
    if (hasOnboarded || !mounted) return;
    // Give the general onboarding modal (if any) a moment to appear first.
    await Future.delayed(const Duration(milliseconds: 800));
    if (!mounted) return;
    await NeighborhoodPickerSheet.show(context);
  } catch (_) {
    // Non-critical — silently ignore if the network is unavailable.
  }
}
void _initNotificationListener() {
_notifSub = NotificationService.instance.foregroundMessages.listen((message) {
if (mounted) {

View file

@ -0,0 +1,385 @@
import 'dart:async';
import 'dart:io';
import 'package:camera/camera.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:permission_handler/permission_handler.dart';
import '../../../screens/audio/audio_library_screen.dart';
import '../../../theme/tokens.dart';
import '../../../theme/app_theme.dart';
import 'quip_decorate_screen.dart';
/// Stage 1 of the new Quip creation flow.
///
/// Full-screen camera preview with:
/// - Pre-record sound selection (top-center)
/// - Flash + flip camera controls (top-right)
/// - 10 s progress-ring record button (bottom-center)
/// Tap = start/stop toggle; Hold = hold-to-record
///
/// On stop (or auto-stop at 10 s), navigates to [QuipDecorateScreen].
/// Stateless shell for the Quip camera; all behavior lives in the State.
class QuipCameraScreen extends StatefulWidget {
  const QuipCameraScreen({super.key});

  @override
  State<QuipCameraScreen> createState() {
    return _QuipCameraScreenState();
  }
}
class _QuipCameraScreenState extends State<QuipCameraScreen>
    with WidgetsBindingObserver {
  /// Hard cap on clip length; recording auto-stops when this elapses.
  static const Duration _maxDuration = Duration(seconds: 10);

  /// Repaint cadence for the progress ring while recording (~33 fps).
  static const Duration _tickInterval = Duration(milliseconds: 30);

  // Camera
  List<CameraDescription> _cameras = [];
  CameraController? _cameraController;
  bool _isRearCamera = true;
  bool _isInitializing = true;
  bool _flashOn = false;

  // Recording
  bool _isRecording = false;
  double _progress = 0.0; // 0.0–1.0 fraction of _maxDuration elapsed
  Timer? _progressTicker;
  Timer? _autoStopTimer;
  DateTime? _recordStart;

  // Pre-record audio selected from the audio library (optional).
  AudioTrack? _selectedAudio;

  // Brief spinner between stopping a recording and navigating onward.
  bool _isProcessing = false;

  @override
  void initState() {
    super.initState();
    WidgetsBinding.instance.addObserver(this);
    _initCamera();
  }

  @override
  void dispose() {
    WidgetsBinding.instance.removeObserver(this);
    _progressTicker?.cancel();
    _autoStopTimer?.cancel();
    _cameraController?.dispose();
    super.dispose();
  }

  @override
  void didChangeAppLifecycleState(AppLifecycleState state) {
    // Release the camera when the app is backgrounded and re-acquire it on
    // resume. The controller is nulled after dispose so build() falls back to
    // the loading state instead of rendering a disposed CameraPreview.
    if (state == AppLifecycleState.inactive) {
      final controller = _cameraController;
      if (controller != null && controller.value.isInitialized) {
        controller.dispose();
        _cameraController = null;
        if (mounted) setState(() => _isInitializing = true);
      }
    } else if (state == AppLifecycleState.resumed) {
      if (_cameraController == null ||
          !_cameraController!.value.isInitialized) {
        _initCamera();
      }
    }
  }

  // Camera init

  /// Requests camera + microphone permissions, then initializes the
  /// controller for the currently-selected lens direction. Pops the screen
  /// if permissions are denied; init failures leave the loading state off
  /// so the UI does not spin forever.
  Future<void> _initCamera() async {
    setState(() => _isInitializing = true);
    if (!kIsWeb) {
      final status =
          await [Permission.camera, Permission.microphone].request();
      if (status[Permission.camera] != PermissionStatus.granted ||
          status[Permission.microphone] != PermissionStatus.granted) {
        if (mounted) {
          ScaffoldMessenger.of(context).showSnackBar(
            const SnackBar(content: Text('Camera & microphone access required')),
          );
          Navigator.pop(context);
        }
        return;
      }
    }
    try {
      _cameras = await availableCameras();
      if (_cameras.isEmpty) throw Exception('No cameras found');
      final camera = _cameras.firstWhere(
        (c) => c.lensDirection ==
            (_isRearCamera
                ? CameraLensDirection.back
                : CameraLensDirection.front),
        orElse: () => _cameras.first,
      );
      _cameraController = CameraController(
        camera,
        ResolutionPreset.high,
        enableAudio: true,
        imageFormatGroup: ImageFormatGroup.yuv420,
      );
      await _cameraController!.initialize();
      await _cameraController!.prepareForVideoRecording();
      if (mounted) setState(() => _isInitializing = false);
    } catch (e) {
      // Init failed (no camera, HAL error, …); clear the spinner regardless.
      if (mounted) setState(() => _isInitializing = false);
    }
  }

  /// Switches between front and rear lenses. Disabled while recording.
  Future<void> _toggleCamera() async {
    if (_isRecording) return;
    setState(() {
      _isRearCamera = !_isRearCamera;
      _isInitializing = true;
    });
    await _cameraController?.dispose();
    _cameraController = null;
    _initCamera();
  }

  /// Toggles the torch on/off; errors (e.g. no torch on front lens) are
  /// deliberately ignored.
  Future<void> _toggleFlash() async {
    if (_cameraController == null) return;
    try {
      _flashOn = !_flashOn;
      await _cameraController!
          .setFlashMode(_flashOn ? FlashMode.torch : FlashMode.off);
      setState(() {});
    } catch (_) {}
  }

  // Audio

  /// Opens the audio library so the user can pick a backing track before
  /// recording.
  Future<void> _pickSound() async {
    final track = await Navigator.push<AudioTrack>(
      context,
      MaterialPageRoute(builder: (_) => const AudioLibraryScreen()),
    );
    if (track != null && mounted) {
      setState(() => _selectedAudio = track);
    }
  }

  // Recording

  /// Starts video capture, arms the 10 s auto-stop timer and the progress
  /// ticker. No-op if the camera is not ready or already recording.
  Future<void> _startRecording() async {
    if (_cameraController == null ||
        !_cameraController!.value.isInitialized ||
        _isRecording) return;
    try {
      await _cameraController!.startVideoRecording();
      _recordStart = DateTime.now();
      _autoStopTimer = Timer(_maxDuration, _stopRecording);
      _progressTicker =
          Timer.periodic(_tickInterval, (_) => _updateProgress());
      if (mounted) setState(() => _isRecording = true);
    } catch (_) {}
  }

  /// Advances the progress ring based on wall-clock time since record start.
  void _updateProgress() {
    if (!mounted || _recordStart == null) return;
    final elapsed = DateTime.now().difference(_recordStart!);
    setState(() {
      _progress =
          (elapsed.inMilliseconds / _maxDuration.inMilliseconds).clamp(0.0, 1.0);
    });
  }

  /// Stops capture, pauses the preview, and pushes the decorate screen with
  /// the recorded file. Preview resumes when the user comes back.
  Future<void> _stopRecording() async {
    if (!_isRecording) return;
    _progressTicker?.cancel();
    _autoStopTimer?.cancel();
    try {
      final xfile = await _cameraController!.stopVideoRecording();
      if (!mounted) return;
      setState(() {
        _isRecording = false;
        _progress = 0.0;
        _isProcessing = true;
      });
      await _cameraController?.pausePreview();
      final videoFile = File(xfile.path);
      if (mounted) {
        await Navigator.push(
          context,
          MaterialPageRoute(
            builder: (_) => QuipDecorateScreen(
              videoFile: videoFile,
              preloadedAudio: _selectedAudio,
            ),
          ),
        );
        await _cameraController?.resumePreview();
        if (mounted) setState(() => _isProcessing = false);
      }
    } catch (_) {
      // Stop failed — reset all recording UI state so the user can retry.
      if (mounted) setState(() {_isRecording = false; _progress = 0.0; _isProcessing = false;});
    }
  }

  /// Tap toggles start/stop (hold-to-record is wired separately on the
  /// record button's long-press callbacks).
  void _onRecordTap() {
    if (_isRecording) {
      _stopRecording();
    } else {
      _startRecording();
    }
  }

  // Build

  @override
  Widget build(BuildContext context) {
    if (_isInitializing || _cameraController == null) {
      return const Scaffold(
        backgroundColor: SojornColors.basicBlack,
        body: Center(child: CircularProgressIndicator(color: SojornColors.basicWhite)),
      );
    }
    return Scaffold(
      backgroundColor: SojornColors.basicBlack,
      body: Stack(
        fit: StackFit.expand,
        children: [
          // Full-screen camera preview
          CameraPreview(_cameraController!),
          // Processing overlay
          if (_isProcessing)
            const ColoredBox(
              color: Color(0x88000000),
              child: Center(child: CircularProgressIndicator(color: SojornColors.basicWhite)),
            ),
          // Top bar: close, sound picker, flash/flip controls
          SafeArea(
            child: Column(
              children: [
                Padding(
                  padding: const EdgeInsets.symmetric(horizontal: 8, vertical: 4),
                  child: Row(
                    crossAxisAlignment: CrossAxisAlignment.center,
                    children: [
                      // Close
                      IconButton(
                        icon: const Icon(Icons.close, color: SojornColors.basicWhite),
                        onPressed: () => Navigator.pop(context),
                      ),
                      // Add Sound (center)
                      Expanded(
                        child: Center(
                          child: GestureDetector(
                            onTap: _pickSound,
                            child: Container(
                              padding: const EdgeInsets.symmetric(horizontal: 14, vertical: 7),
                              decoration: BoxDecoration(
                                border: Border.all(color: SojornColors.basicWhite.withValues(alpha: 0.7)),
                                borderRadius: BorderRadius.circular(20),
                              ),
                              child: Row(
                                mainAxisSize: MainAxisSize.min,
                                children: [
                                  const Icon(Icons.music_note, color: SojornColors.basicWhite, size: 16),
                                  const SizedBox(width: 6),
                                  Text(
                                    _selectedAudio != null
                                        ? _selectedAudio!.title
                                        : 'Add Sound',
                                    style: const TextStyle(
                                      color: SojornColors.basicWhite,
                                      fontSize: 13,
                                      fontWeight: FontWeight.w500,
                                    ),
                                    maxLines: 1,
                                    overflow: TextOverflow.ellipsis,
                                  ),
                                ],
                              ),
                            ),
                          ),
                        ),
                      ),
                      // Flash + Flip
                      Row(
                        children: [
                          IconButton(
                            icon: Icon(
                              _flashOn ? Icons.flash_on : Icons.flash_off,
                              color: SojornColors.basicWhite,
                            ),
                            onPressed: _toggleFlash,
                          ),
                          IconButton(
                            icon: const Icon(Icons.flip_camera_ios, color: SojornColors.basicWhite),
                            onPressed: _toggleCamera,
                          ),
                        ],
                      ),
                    ],
                  ),
                ),
              ],
            ),
          ),
          // Record button (bottom-center)
          Positioned(
            bottom: 56,
            left: 0,
            right: 0,
            child: Center(child: _buildRecordButton()),
          ),
        ],
      ),
    );
  }

  /// 88px record button: outer progress ring + solid red disc. Tap toggles;
  /// long-press records while held.
  Widget _buildRecordButton() {
    return GestureDetector(
      onTap: _onRecordTap,
      onLongPress: _startRecording,
      onLongPressUp: _stopRecording,
      child: SizedBox(
        width: 88,
        height: 88,
        child: Stack(
          alignment: Alignment.center,
          children: [
            // Progress ring
            SizedBox(
              width: 88,
              height: 88,
              child: CircularProgressIndicator(
                value: _isRecording ? _progress : 0.0,
                strokeWidth: 4,
                backgroundColor: SojornColors.basicWhite.withValues(alpha: 0.3),
                valueColor:
                    const AlwaysStoppedAnimation<Color>(SojornColors.destructive),
              ),
            ),
            // Inner solid circle (slightly smaller). The disc is record-red in
            // both states; only the white ring border disappears while recording.
            Container(
              width: 68,
              height: 68,
              decoration: BoxDecoration(
                color: SojornColors.destructive,
                shape: BoxShape.circle,
                border: Border.all(
                  color: SojornColors.basicWhite,
                  width: _isRecording ? 0 : 3,
                ),
              ),
              child: _isRecording
                  ? const Icon(Icons.stop_rounded,
                      color: SojornColors.basicWhite, size: 32)
                  : null,
            ),
          ],
        ),
      ),
    );
  }
}

View file

@ -1,13 +1,14 @@
import 'package:flutter/material.dart';
import 'quip_recorder_screen.dart';
import 'quip_camera_screen.dart';
/// Entry point wrapper for the Quip Creation Flow.
/// Navigation is now handled linearly starting from [QuipRecorderScreen].
/// Routes to [QuipCameraScreen] — the new Snapchat-style camera with
/// instant sticker/text decoration and zero encoding wait.
class QuipCreationFlow extends StatelessWidget {
  const QuipCreationFlow({super.key});

  @override
  Widget build(BuildContext context) {
    // The legacy QuipRecorderScreen entry point was replaced by the
    // camera-first flow; only the new screen is returned here (the old
    // duplicate return statement was unreachable dead code).
    return const QuipCameraScreen();
  }
}

View file

@ -0,0 +1,593 @@
import 'dart:convert';
import 'dart:io';
import 'package:flutter/material.dart';
import 'package:flutter_riverpod/flutter_riverpod.dart';
import 'package:video_player/video_player.dart';
import '../../../models/quip_text_overlay.dart';
import '../../../providers/quip_upload_provider.dart';
import '../../../screens/audio/audio_library_screen.dart';
import '../../../theme/tokens.dart';
import '../../../theme/app_theme.dart';
// Curated sticker/emoji set for the picker
const _kTextStickers = ['LOL', 'OMG', 'WOW', 'WAIT', 'FR?', 'NO WAY'];
const _kEmojis = [
'🎉', '🔥', '❤️', '😂', '💯', '',
'🤣', '😍', '🙌', '😮', '💕', '🤩',
'🎶', '🌟', '💀', '😎', '🥰', '🤔',
'👀', '🫶',
];
// Colors available for text overlays
const _kTextColors = [
Colors.white,
Colors.yellow,
Colors.cyan,
Colors.pinkAccent,
Colors.greenAccent,
Colors.redAccent,
];
/// Stage 2 of the new Quip creation flow.
///
/// The raw video loops immediately. The user decorates with:
/// - Draggable + pinch-to-scale/rotate text and sticker overlays
/// - Pre-recorded or newly-selected background audio
/// - A "Post Quip" FAB that fires a background upload and returns to the feed
class QuipDecorateScreen extends ConsumerStatefulWidget {
  /// The freshly recorded clip to preview and decorate.
  final File videoFile;

  /// Audio chosen on the camera screen before recording, if any.
  final AudioTrack? preloadedAudio;

  const QuipDecorateScreen({
    super.key,
    required this.videoFile,
    this.preloadedAudio,
  });

  @override
  ConsumerState<QuipDecorateScreen> createState() {
    return _QuipDecorateScreenState();
  }
}
class _QuipDecorateScreenState extends ConsumerState<QuipDecorateScreen> {
  late VideoPlayerController _controller;
  bool _videoReady = false;

  // Overlays currently placed on the video, in z-order (last = topmost).
  final List<_EditableOverlay> _overlays = [];
  String? _draggingId; // id of the item being dragged/scaled, if any

  // Gesture baselines captured at onScaleStart. ScaleUpdateDetails reports
  // scale/rotation cumulatively since the gesture began, so each update must
  // be applied against these snapshots — compounding them per tick makes the
  // overlay zoom/spin exponentially.
  double _gestureBaseScale = 1.0;
  double _gestureBaseRotation = 0.0;

  // Trash zone, shown only while an overlay is being dragged.
  bool _showTrash = false;
  bool _overTrash = false;

  // Background audio
  AudioTrack? _selectedAudio;

  // Color applied to the next text overlay the user creates.
  Color _nextTextColor = Colors.white;

  @override
  void initState() {
    super.initState();
    _selectedAudio = widget.preloadedAudio;
    _initVideo();
  }

  @override
  void dispose() {
    _controller.dispose();
    super.dispose();
  }

  /// Initializes the looping preview of the raw clip.
  Future<void> _initVideo() async {
    _controller = VideoPlayerController.file(widget.videoFile);
    await _controller.initialize();
    _controller.setLooping(true);
    _controller.play();
    if (mounted) setState(() => _videoReady = true);
  }

  // Overlay management

  /// Microsecond timestamp is unique enough for widget keying per session.
  String _newId() => DateTime.now().microsecondsSinceEpoch.toString();

  /// Adds a text overlay centered slightly above the middle of the video.
  void _addTextOverlay(String text) {
    if (text.trim().isEmpty) return;
    setState(() {
      _overlays.add(_EditableOverlay(
        id: _newId(),
        type: QuipOverlayType.text,
        content: text.trim(),
        color: _nextTextColor,
        normalizedX: 0.5,
        normalizedY: 0.4,
        scale: 1.0,
        rotation: 0.0,
      ));
    });
  }

  /// Adds a sticker/emoji overlay centered on the video. Blank content
  /// (e.g. a garbled picker entry) is ignored — it would be an invisible,
  /// undeletable overlay.
  void _addStickerOverlay(String sticker) {
    if (sticker.trim().isEmpty) return;
    setState(() {
      _overlays.add(_EditableOverlay(
        id: _newId(),
        type: QuipOverlayType.sticker,
        content: sticker,
        color: Colors.white,
        normalizedX: 0.5,
        normalizedY: 0.5,
        scale: 1.0,
        rotation: 0.0,
      ));
    });
  }

  void _removeOverlay(String id) {
    setState(() => _overlays.removeWhere((o) => o.id == id));
  }

  // Actions

  /// Bottom sheet with a color row + text field. Wrapped in a
  /// StatefulBuilder because a plain setState on this State does not rebuild
  /// the sheet's subtree, so the selected-color ring would never update.
  void _openTextSheet() {
    final ctrl = TextEditingController();
    showModalBottomSheet(
      context: context,
      isScrollControlled: true,
      backgroundColor: const Color(0xDD000000),
      builder: (sheetCtx) => StatefulBuilder(
        builder: (sheetCtx, setSheetState) => Padding(
          padding: EdgeInsets.only(
            bottom: MediaQuery.of(sheetCtx).viewInsets.bottom + 16,
            left: 16,
            right: 16,
            top: 16,
          ),
          child: Column(
            mainAxisSize: MainAxisSize.min,
            crossAxisAlignment: CrossAxisAlignment.start,
            children: [
              // Color row
              Row(
                children: _kTextColors.map((c) {
                  final selected = c == _nextTextColor;
                  return GestureDetector(
                    // setSheetState repaints the ring AND persists the pick
                    // on the enclosing State via the captured field.
                    onTap: () => setSheetState(() => _nextTextColor = c),
                    child: Container(
                      margin: const EdgeInsets.only(right: 8),
                      width: 28,
                      height: 28,
                      decoration: BoxDecoration(
                        color: c,
                        shape: BoxShape.circle,
                        border: selected
                            ? Border.all(color: SojornColors.basicWhite, width: 2)
                            : null,
                      ),
                    ),
                  );
                }).toList(),
              ),
              const SizedBox(height: 12),
              TextField(
                controller: ctrl,
                autofocus: true,
                style: const TextStyle(color: SojornColors.basicWhite, fontSize: 22),
                decoration: InputDecoration(
                  border: InputBorder.none,
                  hintText: 'Type something...',
                  hintStyle: TextStyle(color: SojornColors.basicWhite.withValues(alpha: 0.4)),
                ),
                onSubmitted: (val) {
                  Navigator.pop(sheetCtx);
                  _addTextOverlay(val);
                },
              ),
              Align(
                alignment: Alignment.centerRight,
                child: TextButton(
                  onPressed: () {
                    Navigator.pop(sheetCtx);
                    _addTextOverlay(ctrl.text);
                  },
                  child: const Text('Done', style: TextStyle(color: SojornColors.basicWhite, fontSize: 16)),
                ),
              ),
            ],
          ),
        ),
      ),
    );
  }

  /// Bottom sheet with text-sticker chips + an emoji grid; picking one adds
  /// it as a sticker overlay and closes the sheet.
  void _openStickerSheet() {
    showModalBottomSheet(
      context: context,
      backgroundColor: const Color(0xDD000000),
      shape: const RoundedRectangleBorder(
        borderRadius: BorderRadius.vertical(top: Radius.circular(16)),
      ),
      builder: (ctx) => SafeArea(
        child: Column(
          mainAxisSize: MainAxisSize.min,
          crossAxisAlignment: CrossAxisAlignment.start,
          children: [
            // Text stickers row
            Padding(
              padding: const EdgeInsets.fromLTRB(16, 16, 16, 8),
              child: Wrap(
                spacing: 10,
                runSpacing: 10,
                children: _kTextStickers.map((s) {
                  return GestureDetector(
                    onTap: () {
                      Navigator.pop(ctx);
                      _addStickerOverlay(s);
                    },
                    child: Container(
                      padding: const EdgeInsets.symmetric(horizontal: 14, vertical: 8),
                      decoration: BoxDecoration(
                        border: Border.all(color: SojornColors.basicWhite, width: 1.5),
                        borderRadius: BorderRadius.circular(8),
                      ),
                      child: Text(s,
                          style: const TextStyle(
                              color: SojornColors.basicWhite,
                              fontWeight: FontWeight.bold,
                              fontSize: 14)),
                    ),
                  );
                }).toList(),
              ),
            ),
            // Emoji grid
            SizedBox(
              height: 180,
              child: GridView.count(
                crossAxisCount: 7,
                padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 8),
                children: _kEmojis.map((e) {
                  return GestureDetector(
                    onTap: () {
                      Navigator.pop(ctx);
                      _addStickerOverlay(e);
                    },
                    child: Center(
                      child: Text(e, style: const TextStyle(fontSize: 28)),
                    ),
                  );
                }).toList(),
              ),
            ),
          ],
        ),
      ),
    );
  }

  /// Opens the audio library to (re)pick a backing track.
  Future<void> _pickSound() async {
    final track = await Navigator.push<AudioTrack>(
      context,
      MaterialPageRoute(builder: (_) => const AudioLibraryScreen()),
    );
    if (track != null && mounted) {
      setState(() => _selectedAudio = track);
    }
  }

  /// Serializes overlays (+ optional sound id) to JSON, fires a background
  /// upload, and pops back to the feed immediately.
  Future<void> _postQuip() async {
    _controller.pause();
    // Build overlay + sound JSON payload
    final payload = {
      'overlays': _overlays.map((o) => o.toJson()).toList(),
      if (_selectedAudio != null) 'sound_id': _selectedAudio!.path,
    };
    final overlayJson = jsonEncode(payload);
    ref.read(quipUploadProvider.notifier).startUpload(
      widget.videoFile,
      '',
      overlayJson: overlayJson,
    );
    if (mounted) {
      Navigator.of(context).popUntil((route) => route.isFirst);
      ScaffoldMessenger.of(context).showSnackBar(
        const SnackBar(content: Text('Uploading your Quip...')),
      );
    }
  }

  // Build

  @override
  Widget build(BuildContext context) {
    if (!_videoReady) {
      return const Scaffold(
        backgroundColor: SojornColors.basicBlack,
        body: Center(child: CircularProgressIndicator(color: SojornColors.basicWhite)),
      );
    }
    return Scaffold(
      backgroundColor: SojornColors.basicBlack,
      body: LayoutBuilder(
        builder: (context, constraints) {
          final w = constraints.maxWidth;
          final h = constraints.maxHeight;
          return Stack(
            fit: StackFit.expand,
            children: [
              // 1. Looping video
              Center(
                child: FittedBox(
                  fit: BoxFit.cover,
                  child: SizedBox(
                    width: _controller.value.size.width,
                    height: _controller.value.size.height,
                    child: VideoPlayer(_controller),
                  ),
                ),
              ),
              // 2. Overlay items (draggable, pinch-to-scale/rotate)
              ..._overlays.map((o) => _buildOverlayWidget(o, w, h)),
              // 3. Trash zone (shown while dragging)
              if (_showTrash)
                Positioned(
                  bottom: 40,
                  left: 0,
                  right: 0,
                  child: Center(
                    child: AnimatedContainer(
                      duration: const Duration(milliseconds: 150),
                      padding: const EdgeInsets.all(16),
                      decoration: BoxDecoration(
                        color: _overTrash
                            ? SojornColors.destructive
                            : const Color(0xAA000000),
                        shape: BoxShape.circle,
                      ),
                      child: Icon(
                        Icons.delete_outline,
                        color: SojornColors.basicWhite,
                        size: _overTrash ? 40 : 32,
                      ),
                    ),
                  ),
                ),
              // 4. Top-left back button
              SafeArea(
                child: Align(
                  alignment: Alignment.topLeft,
                  child: IconButton(
                    icon: const Icon(Icons.arrow_back, color: SojornColors.basicWhite),
                    onPressed: () => Navigator.pop(context),
                  ),
                ),
              ),
              // 5. Right sidebar (Text, Sticker, Sound)
              Positioned(
                right: 16,
                top: 100,
                child: SafeArea(
                  child: Column(
                    children: [
                      _buildSideButton(Icons.text_fields, 'Text', _openTextSheet),
                      const SizedBox(height: 20),
                      _buildSideButton(Icons.emoji_emotions_outlined, 'Sticker', _openStickerSheet),
                      const SizedBox(height: 20),
                      _buildSideButton(
                        _selectedAudio != null ? Icons.music_note : Icons.music_note_outlined,
                        _selectedAudio != null ? 'Sound ✓' : 'Sound',
                        _pickSound,
                      ),
                    ],
                  ),
                ),
              ),
              // 6. "Post Quip" FAB (bottom-right)
              Positioned(
                bottom: 40,
                right: 20,
                child: FloatingActionButton.extended(
                  backgroundColor: AppTheme.brightNavy,
                  onPressed: _postQuip,
                  icon: const Icon(Icons.send_rounded, color: SojornColors.basicWhite),
                  label: const Text(
                    'Post Quip',
                    style: TextStyle(color: SojornColors.basicWhite, fontWeight: FontWeight.w600),
                  ),
                ),
              ),
            ],
          );
        },
      ),
    );
  }

  /// One positioned, gesture-enabled overlay. Drag moves it (normalized
  /// coords), pinch scales/rotates it relative to the gesture-start snapshot,
  /// and releasing over the trash zone deletes it.
  Widget _buildOverlayWidget(_EditableOverlay overlay, double w, double h) {
    final absX = overlay.normalizedX * w;
    final absY = overlay.normalizedY * h;
    final isText = overlay.type == QuipOverlayType.text;
    return Positioned(
      left: absX - 60, // rough half-width offset so item centers on position
      top: absY - 30,
      child: GestureDetector(
        onScaleStart: (_) {
          // Snapshot the transform: details.scale/rotation are cumulative
          // for the whole gesture, not per-frame deltas.
          _gestureBaseScale = overlay.scale;
          _gestureBaseRotation = overlay.rotation;
          setState(() {
            _draggingId = overlay.id;
            _showTrash = true;
          });
        },
        onScaleUpdate: (details) {
          final idx = _overlays.indexWhere((o) => o.id == overlay.id);
          if (idx == -1) return;
          // Convert global focal point to normalized position
          final newNX = (details.focalPoint.dx / w).clamp(0.0, 1.0);
          final newNY = (details.focalPoint.dy / h).clamp(0.0, 1.0);
          // Detect if over trash zone (bottom 80px)
          final overTrash = details.focalPoint.dy > h - 80;
          setState(() {
            _overTrash = overTrash;
            _overlays[idx] = _overlays[idx].copyWith(
              normalizedX: newNX,
              normalizedY: newNY,
              // Apply the cumulative gesture values to the snapshot taken at
              // onScaleStart (compounding per update grows exponentially).
              scale: (_gestureBaseScale * details.scale).clamp(0.3, 5.0),
              rotation: _gestureBaseRotation + details.rotation,
            );
          });
        },
        onScaleEnd: (_) {
          if (_overTrash && _draggingId != null) {
            _removeOverlay(_draggingId!);
          }
          setState(() {
            _draggingId = null;
            _showTrash = false;
            _overTrash = false;
          });
        },
        child: Transform(
          transform: Matrix4.identity()
            ..scale(overlay.scale)
            ..rotateZ(overlay.rotation),
          alignment: Alignment.center,
          child: isText
              ? _buildTextChip(overlay)
              : _buildStickerChip(overlay),
        ),
      ),
    );
  }

  /// Text overlay chip: bold colored text on a translucent dark pill.
  Widget _buildTextChip(_EditableOverlay overlay) {
    return Container(
      padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 6),
      decoration: BoxDecoration(
        color: Colors.black.withValues(alpha: 0.4),
        borderRadius: BorderRadius.circular(8),
      ),
      child: Text(
        overlay.content,
        style: TextStyle(
          color: overlay.color,
          fontSize: 28,
          fontWeight: FontWeight.bold,
          shadows: const [Shadow(blurRadius: 4, color: Colors.black)],
        ),
      ),
    );
  }

  /// Sticker chip: bare large glyph for emoji, bordered box for text
  /// stickers. NOTE(review): the length heuristic treats any ≤2-char string
  /// as an emoji — confirm multi-codepoint emoji (ZWJ sequences) render
  /// acceptably through the text-sticker branch.
  Widget _buildStickerChip(_EditableOverlay overlay) {
    final isEmoji = overlay.content.runes.length == 1 ||
        overlay.content.length <= 2;
    if (isEmoji) {
      return Text(overlay.content, style: const TextStyle(fontSize: 48));
    }
    // Text sticker ('LOL', 'OMG', etc.)
    return Container(
      padding: const EdgeInsets.symmetric(horizontal: 14, vertical: 8),
      decoration: BoxDecoration(
        border: Border.all(color: SojornColors.basicWhite, width: 2),
        borderRadius: BorderRadius.circular(8),
        color: Colors.black.withValues(alpha: 0.3),
      ),
      child: Text(
        overlay.content,
        style: const TextStyle(
          color: SojornColors.basicWhite,
          fontSize: 24,
          fontWeight: FontWeight.bold,
        ),
      ),
    );
  }

  /// Circular icon button + caption used in the right sidebar.
  Widget _buildSideButton(IconData icon, String label, VoidCallback onTap) {
    return Column(
      children: [
        GestureDetector(
          onTap: onTap,
          child: CircleAvatar(
            backgroundColor: const Color(0x8A000000),
            radius: 24,
            child: Icon(icon, color: SojornColors.basicWhite, size: 26),
          ),
        ),
        const SizedBox(height: 4),
        Text(
          label,
          style: const TextStyle(
            color: SojornColors.basicWhite,
            fontSize: 11,
            shadows: [Shadow(blurRadius: 2, color: Colors.black)],
          ),
        ),
      ],
    );
  }
}
// Internal mutable overlay state
/// Mutable editing-time state for one overlay. Identity (id/type/content/
/// color) is fixed; only the transform (position/scale/rotation) changes
/// while the user drags or pinches.
class _EditableOverlay {
  final String id;
  final QuipOverlayType type;
  final String content;
  final Color color;
  double normalizedX; // 0.0–1.0 of the video width
  double normalizedY; // 0.0–1.0 of the video height
  double scale;
  double rotation; // radians

  _EditableOverlay({
    required this.id,
    required this.type,
    required this.content,
    required this.color,
    required this.normalizedX,
    required this.normalizedY,
    required this.scale,
    required this.rotation,
  });

  /// Returns a copy with the given transform fields replaced; identity
  /// fields always carry over unchanged.
  _EditableOverlay copyWith({
    double? normalizedX,
    double? normalizedY,
    double? scale,
    double? rotation,
  }) {
    final next = _EditableOverlay(
      id: id,
      type: type,
      content: content,
      color: color,
      normalizedX: normalizedX ?? this.normalizedX,
      normalizedY: normalizedY ?? this.normalizedY,
      scale: scale ?? this.scale,
      rotation: rotation ?? this.rotation,
    );
    return next;
  }

  /// Serializes to the overlay_json wire shape consumed by the feed renderer.
  /// Key insertion order matters for byte-stable jsonEncode output.
  Map<String, dynamic> toJson() {
    final map = <String, dynamic>{};
    map['id'] = id;
    map['type'] = type.name;
    map['content'] = content;
    map['color'] = color.value;
    map['position'] = {'x': normalizedX, 'y': normalizedY};
    map['scale'] = scale;
    map['rotation'] = rotation;
    return map;
  }
}

View file

@ -1,5 +1,8 @@
import 'dart:convert';
import 'package:flutter/material.dart';
import 'package:video_player/video_player.dart';
import '../../../models/quip_text_overlay.dart';
import '../../../widgets/media/signed_media_image.dart';
import '../../../widgets/video_player_with_comments.dart';
import '../../../models/post.dart';
@ -194,6 +197,81 @@ class QuipVideoItem extends StatelessWidget {
);
}
/// Parses `overlay_json` and returns a list of non-interactive overlay
/// widgets rendered on top of the video during feed playback.
///
/// Expected JSON shape: `{"overlays": [...]}` where each element decodes via
/// `QuipOverlayItem.fromJson` (normalized 0..1 position, scale, rotation,
/// content). Returns an empty list when the field is absent, empty, or
/// malformed — overlays are decoration and must never break playback.
List<Widget> _buildOverlayWidgets(BoxConstraints constraints) {
  final json = quip.overlayJson;
  if (json == null || json.isEmpty) return [];
  try {
    final decoded = jsonDecode(json) as Map<String, dynamic>;
    // Tolerant decode: a missing "overlays" key or non-map elements are
    // silently dropped rather than throwing.
    final items = (decoded['overlays'] as List<dynamic>? ?? [])
        .whereType<Map<String, dynamic>>()
        .map(QuipOverlayItem.fromJson)
        .toList();
    // Positions are normalized; project onto the current layout size.
    final w = constraints.maxWidth;
    final h = constraints.maxHeight;
    return items.map((item) {
      final absX = item.position.dx * w;
      final absY = item.position.dy * h;
      final isSticker = item.type == QuipOverlayType.sticker;
      Widget child;
      if (isSticker) {
        // Heuristic: one code point, or <=2 UTF-16 units (surrogate-pair
        // emoji), is treated as a bare emoji rendered without a frame.
        // NOTE(review): `length <= 2` also matches two-letter text like
        // "OK" — confirm that is intended.
        final isEmoji = item.content.runes.length == 1 ||
            item.content.length <= 2;
        if (isEmoji) {
          child = Text(item.content,
              style: TextStyle(fontSize: 42 * item.scale));
        } else {
          // Word sticker: bold white text in a bordered, translucent pill.
          child = Container(
            padding: const EdgeInsets.symmetric(horizontal: 12, vertical: 6),
            decoration: BoxDecoration(
              border: Border.all(color: SojornColors.basicWhite, width: 2),
              borderRadius: BorderRadius.circular(8),
              color: Colors.black.withValues(alpha: 0.3),
            ),
            child: Text(
              item.content,
              style: TextStyle(
                color: SojornColors.basicWhite,
                fontSize: 20 * item.scale,
                fontWeight: FontWeight.bold,
              ),
            ),
          );
        }
      } else {
        // Plain text overlay: user-chosen color on a dimmed backdrop.
        child = Container(
          padding: const EdgeInsets.symmetric(horizontal: 10, vertical: 5),
          decoration: BoxDecoration(
            color: Colors.black.withValues(alpha: 0.4),
            borderRadius: BorderRadius.circular(6),
          ),
          child: Text(
            item.content,
            style: TextStyle(
              color: item.color,
              fontSize: 24 * item.scale,
              fontWeight: FontWeight.bold,
              shadows: const [Shadow(blurRadius: 4, color: Colors.black)],
            ),
          ),
        );
      }
      // Fixed -50/-20 offsets approximate centering regardless of the
      // child's real size. NOTE(review): wide stickers may drift from
      // where the editor placed them — verify against the capture screen.
      return Positioned(
        left: absX - 50,
        top: absY - 20,
        child: Transform.rotate(angle: item.rotation, child: child),
      );
    }).toList();
  } catch (_) {
    // Malformed overlay_json must never take down the feed item.
    return [];
  }
}
Widget _buildPauseOverlay() {
if (!isActive || !isUserPaused) return const SizedBox.shrink();
@ -219,7 +297,8 @@ class QuipVideoItem extends StatelessWidget {
onTap: onTogglePause,
child: Container(
color: SojornColors.basicBlack,
child: Stack(
child: LayoutBuilder(
builder: (context, constraints) => Stack(
fit: StackFit.expand,
children: [
AnimatedOpacity(
@ -227,6 +306,8 @@ class QuipVideoItem extends StatelessWidget {
opacity: isActive ? 1 : 0.6,
child: _buildVideo(),
),
// Quip overlays (text + stickers, non-interactive in feed)
..._buildOverlayWidgets(constraints),
Container(
decoration: const BoxDecoration(
gradient: LinearGradient(
@ -304,6 +385,7 @@ class QuipVideoItem extends StatelessWidget {
],
),
),
),
);
}

View file

@ -24,6 +24,7 @@ class Quip {
final String? avatarUrl;
final int? durationMs;
final int? likeCount;
final String? overlayJson;
const Quip({
required this.id,
@ -35,6 +36,7 @@ class Quip {
this.avatarUrl,
this.durationMs,
this.likeCount,
this.overlayJson,
});
factory Quip.fromMap(Map<String, dynamic> map) {
@ -53,6 +55,7 @@ class Quip {
avatarUrl: author?['avatar_url'] as String?,
durationMs: map['duration_ms'] as int?,
likeCount: _parseLikeCount(map['metrics']),
overlayJson: map['overlay_json'] as String?,
);
}

View file

@ -570,6 +570,7 @@ class ApiService {
bool isNsfw = false,
String? nsfwReason,
String? visibility,
String? overlayJson,
}) async {
// Validate and sanitize inputs
if (body.isEmpty) {
@ -625,6 +626,7 @@ class ApiService {
if (isNsfw) 'is_nsfw': true,
if (nsfwReason != null) 'nsfw_reason': nsfwReason,
if (visibility != null) 'visibility': visibility,
if (overlayJson != null) 'overlay_json': overlayJson,
},
requireSignature: true,
);

View file

@ -1,4 +1,5 @@
import 'dart:io';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:file_picker/file_picker.dart';
import 'package:path_provider/path_provider.dart';
@ -66,7 +67,7 @@ class AudioOverlayService {
static Future<File?> pickAudioFile() async {
try {
// Request storage permission if needed
if (Platform.isAndroid) {
if (!kIsWeb && Platform.isAndroid) {
final status = await Permission.storage.request();
if (status != PermissionStatus.granted) {
return null;

View file

@ -84,6 +84,9 @@ class ImageUploadService {
throw UploadException('Not authenticated. Please sign in again.');
}
// Strip metadata (GPS, device info, timestamps) before upload
final sanitized = await MediaSanitizer.sanitizeVideo(videoFile);
// Use Go API upload endpoint with R2 integration
final uri = Uri.parse('${ApiConfig.baseUrl}/upload');
@ -92,15 +95,14 @@ class ImageUploadService {
request.headers['Authorization'] = 'Bearer $token';
// CRITICAL: Use fromPath to stream from disk instead of loading into memory
final fileLength = await videoFile.length();
request.files.add(await http.MultipartFile.fromPath(
'media', // Field name matches upload-media
videoFile.path,
sanitized.path,
contentType: http_parser.MediaType.parse('video/mp4'),
));
request.fields['type'] = 'video';
request.fields['fileName'] = videoFile.path.split('/').last;
request.fields['fileName'] = sanitized.path.split('/').last;
onProgress?.call(0.1);

View file

@ -2,6 +2,7 @@ import 'dart:io';
import 'package:flutter/services.dart';
import 'package:flutter_image_compress/flutter_image_compress.dart';
import 'package:image/image.dart' as img;
import 'media/ffmpeg.dart';
class MediaSanitizer {
static Future<File> sanitizeImage(File rawFile) async {
@ -39,10 +40,6 @@ class MediaSanitizer {
}
static Future<File> sanitizeVideo(File rawFile) async {
// For videos, we just validate and return the original file
// Video processing is handled by the video compression library
// This method ensures the file exists and is readable
if (!await rawFile.exists()) {
throw Exception('Video file does not exist');
}
@ -54,7 +51,6 @@ class MediaSanitizer {
throw Exception('Video size exceeds 50MB limit');
}
// Check if it's a valid video file by extension
final fileName = rawFile.path.split('/').last.toLowerCase();
final extension = fileName.split('.').last;
const validExtensions = {'mp4', 'mov', 'webm'};
@ -63,7 +59,23 @@ class MediaSanitizer {
throw Exception('Unsupported video format: $extension');
}
// Return the original file as videos don't need sanitization like images
// Strip all metadata (GPS, device info, timestamps) via FFmpeg remux no re-encode.
try {
final tempDir = Directory.systemTemp;
final output = File(
'${tempDir.path}${Platform.pathSeparator}stripped_${DateTime.now().microsecondsSinceEpoch}.mp4',
);
final session = await FFmpegKit.execute(
'-y -i "${rawFile.path}" -map_metadata -1 -c copy "${output.path}"',
);
final rc = await session.getReturnCode();
if (ReturnCode.isSuccess(rc) && await output.exists()) {
return output;
}
} catch (_) {
// FFmpeg unavailable fall through and return original
}
return rawFile;
}
}

View file

@ -79,7 +79,7 @@ class VideoStitchingService {
if (segments.length == 1) {
// Single video with effects
command = "-i '${segments.first.path}' $filterString '${outputFile.path}'";
command = "-i '${segments.first.path}' $filterString -map_metadata -1 '${outputFile.path}'";
} else {
// Multiple videos - stitch first, then apply effects
final listFile = File('${tempDir.path}/segments_list.txt');
@ -91,7 +91,7 @@ class VideoStitchingService {
final tempStitched = File('${tempDir.path}/temp_stitched.mp4');
// First stitch without effects
// First stitch without effects (metadata stripped at final pass)
final stitchCommand = "-f concat -safe 0 -i '${listFile.path}' -c copy '${tempStitched.path}'";
final stitchSession = await FFmpegKit.execute(stitchCommand);
final stitchReturnCode = await stitchSession.getReturnCode();
@ -100,8 +100,8 @@ class VideoStitchingService {
return null;
}
// Then apply effects to the stitched video
command = "-i '${tempStitched.path}' $filterString '${outputFile.path}'";
// Then apply effects to the stitched video, stripping metadata at final output
command = "-i '${tempStitched.path}' $filterString -map_metadata -1 '${outputFile.path}'";
}
final session = await FFmpegKit.execute(command);
@ -120,7 +120,7 @@ class VideoStitchingService {
final audioCmd =
"-i '${outputFile.path}' -i '$audioOverlayPath' "
"-filter_complex '[1:a]volume=${vol}[a1];[0:a][a1]amix=inputs=2:duration=first:dropout_transition=0' "
"-c:v copy -shortest '${audioOutputFile.path}'";
"-map_metadata -1 -c:v copy -shortest '${audioOutputFile.path}'";
final audioSession = await FFmpegKit.execute(audioCmd);
final audioCode = await audioSession.getReturnCode();
if (ReturnCode.isSuccess(audioCode)) {

View file

@ -67,7 +67,7 @@ class _EnhancedBeaconMapState extends ConsumerState<EnhancedBeaconMap>
Future<void> _getUserLocation() async {
try {
final position = await Geolocator.getCurrentPosition(
desiredAccuracy: LocationAccuracy.high,
desiredAccuracy: LocationAccuracy.low,
);
setState(() {
_userLocation = LatLng(position.latitude, position.longitude);

View file

@ -1,4 +1,5 @@
import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:flutter/material.dart';
import 'package:geolocator/geolocator.dart';
import '../../services/api_service.dart';
@ -71,6 +72,15 @@ class _NeighborhoodPickerSheetState extends State<NeighborhoodPickerSheet> {
_isLoadingGps = true;
_gpsError = null;
});
if (kIsWeb) {
setState(() {
_gpsError = 'GPS detection unavailable on web. Enter your ZIP code below.';
_isLoadingGps = false;
});
return;
}
try {
final position = await Geolocator.getCurrentPosition(
desiredAccuracy: LocationAccuracy.low, // "fuzzy" faster + less battery