Updated a bunch of routes and enabled chat
This commit is contained in:
parent
d4e75a65d4
commit
9f6f7a0bab
|
|
@ -607,6 +607,64 @@ CREATE TRIGGER trg_update_listing_status_on_sold
|
|||
AFTER INSERT ON sold_information
|
||||
FOR EACH ROW EXECUTE FUNCTION update_listing_status_on_sold();
|
||||
|
||||
|
||||
-- Trigger 5: Propagate Animal Updates to Listings
-- Bumping listings.updated_at lets the listing-side sync logic (defined
-- elsewhere in this script) react whenever the underlying animal changes.
CREATE OR REPLACE FUNCTION propagate_animal_updates_to_listings()
RETURNS TRIGGER AS $$
BEGIN
-- Update the related listing to trigger the sync logic
UPDATE listings
SET updated_at = NOW()
WHERE animal_id = NEW.id;

RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trg_propagate_animal_updates
AFTER UPDATE ON animals
FOR EACH ROW
-- Guard against no-op updates: without this, every UPDATE on animals —
-- even one that changes nothing — would rewrite the listing row and
-- re-fire all of listings' own UPDATE triggers.
WHEN (OLD.* IS DISTINCT FROM NEW.*)
EXECUTE FUNCTION propagate_animal_updates_to_listings();
|
||||
|
||||
|
||||
-- Trigger 6: Cascade Delete Listing -> Animal
-- When a listing is soft-deleted, soft-delete the animal it references so
-- the animal stops appearing in animal-level queries as well.
CREATE OR REPLACE FUNCTION cascade_delete_animal()
RETURNS TRIGGER AS $$
BEGIN
-- Soft-delete (never hard-delete) the animal behind this listing.
-- NOTE(review): this assumes a 1:1 listing->animal relationship; if one
-- animal can back multiple listings, deleting a single listing would also
-- hide the animal from the others — confirm against the schema.
UPDATE animals
SET deleted = TRUE, updated_at = NOW()
WHERE id = NEW.animal_id;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trg_cascade_delete_animal
AFTER UPDATE ON listings
FOR EACH ROW
-- Fire only on the FALSE -> TRUE transition of listings.deleted, so later
-- updates to an already-deleted listing do not re-run the cascade.
WHEN (OLD.deleted = FALSE AND NEW.deleted = TRUE)
EXECUTE FUNCTION cascade_delete_animal();
|
||||
|
||||
|
||||
-- Trigger 7: Ensure Single Primary Media per Listing
-- Keeps at most one listing_media row flagged is_primary per listing by
-- demoting every other primary row whenever a row is saved as primary.
CREATE OR REPLACE FUNCTION ensure_single_primary_media()
RETURNS TRIGGER AS $$
BEGIN
    -- Rows that are not being promoted need no bookkeeping.
    IF NEW.is_primary IS NOT TRUE THEN
        RETURN NEW;
    END IF;

    -- Demote any sibling rows of the same listing that still claim primary.
    UPDATE listing_media
    SET is_primary = FALSE
    WHERE listing_id = NEW.listing_id
      AND is_primary = TRUE   -- touch only rows that would actually change
      AND id <> NEW.id;       -- never demote the row being written

    RETURN NEW;
END;
$$ LANGUAGE plpgsql;

CREATE TRIGGER trg_ensure_single_primary_media
BEFORE INSERT OR UPDATE ON listing_media
FOR EACH ROW EXECUTE FUNCTION ensure_single_primary_media();
|
||||
|
||||
-- ======================================================
|
||||
-- END OF SCRIPT
|
||||
-- ======================================================
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -21,6 +21,7 @@
|
|||
"cors": "^2.8.5",
|
||||
"dotenv": "^17.2.3",
|
||||
"express": "^5.1.0",
|
||||
"firebase-admin": "^13.6.0",
|
||||
"node-cron": "^4.2.1",
|
||||
"pg": "^8.16.3",
|
||||
"socket.io": "^4.8.1"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
import express from "express";
|
||||
import pool from "../db/pool.js";
|
||||
import { getIO, getSocketId } from "../socket.js";
|
||||
// import { sendPushNotification } from "../utils/fcm.js";
|
||||
|
||||
const router = express.Router();
|
||||
|
||||
|
|
@ -134,10 +135,6 @@ router.post("/messages", async (req, res) => {
|
|||
media_metadata || null
|
||||
]);
|
||||
|
||||
// Update conversation timestamp
|
||||
const updateConvQuery = `UPDATE conversations SET updated_at = NOW() WHERE id = $1`;
|
||||
await client.query(updateConvQuery, [conversation_id]);
|
||||
|
||||
await client.query("COMMIT");
|
||||
|
||||
// Real-time update via Socket.io
|
||||
|
|
@ -145,6 +142,25 @@ router.post("/messages", async (req, res) => {
|
|||
if (receiverSocketId) {
|
||||
getIO().to(receiverSocketId).emit("receive_message", messageResult.rows[0]);
|
||||
}
|
||||
// else {
|
||||
// // Receiver is OFFLINE: Send Push Notification
|
||||
// const fcmQuery = `SELECT fcm_token FROM user_devices WHERE user_id = $1 AND fcm_token IS NOT NULL AND is_active = TRUE`;
|
||||
// const fcmResult = await pool.query(fcmQuery, [receiver_id]);
|
||||
|
||||
// if (fcmResult.rows.length > 0) {
|
||||
// const tokens = fcmResult.rows.map(row => row.fcm_token);
|
||||
// // Title could be sender's name if we fetched it, or generic. For speed, generic "New Message".
|
||||
// // Ideally, we'd join sender info in the SELECT or pass it if available.
|
||||
// const notificationTitle = "New Message";
|
||||
// const notificationBody = message_type === 'text' ? (content.substring(0, 100) + (content.length > 100 ? "..." : "")) : "Sent a media file";
|
||||
|
||||
// sendPushNotification(tokens, notificationTitle, notificationBody, {
|
||||
// type: "new_message",
|
||||
// conversation_id: conversation_id,
|
||||
// message_id: messageResult.rows[0].id
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
|
||||
res.status(201).json(messageResult.rows[0]);
|
||||
} catch (err) {
|
||||
|
|
@ -156,34 +172,52 @@ router.post("/messages", async (req, res) => {
|
|||
}
|
||||
});
|
||||
|
||||
// 5. PUT /messages/:messageId/read (Mark Read)
|
||||
router.put("/messages/:messageId/read", async (req, res) => {
|
||||
// 5. PUT /conversations/:conversationId/read (Mark Conversation as Read)
|
||||
router.put("/conversations/:conversationId/read", async (req, res) => {
|
||||
try {
|
||||
const { messageId } = req.params;
|
||||
const { conversationId } = req.params;
|
||||
const { userId } = req.body; // The user who is reading the messages
|
||||
|
||||
if (!userId) {
|
||||
return res.status(400).json({ error: "userId is required" });
|
||||
}
|
||||
|
||||
const queryText = `
|
||||
UPDATE messages
|
||||
SET is_read = TRUE, read_at = NOW()
|
||||
WHERE id = $1
|
||||
RETURNING *
|
||||
WHERE conversation_id = $1
|
||||
AND receiver_id = $2
|
||||
AND is_read = FALSE
|
||||
RETURNING id, sender_id, conversation_id
|
||||
`;
|
||||
const result = await pool.query(queryText, [messageId]);
|
||||
const result = await pool.query(queryText, [conversationId, userId]);
|
||||
|
||||
if (result.rows.length === 0) {
|
||||
return res.status(404).json({ error: "Message not found" });
|
||||
}
|
||||
// Even if 0 rows updated, we return success (idempotent)
|
||||
res.json({
|
||||
message: "Messages marked as read",
|
||||
updated_count: result.rows.length,
|
||||
updated_messages: result.rows
|
||||
});
|
||||
|
||||
res.json(result.rows[0]);
|
||||
// Real-time update via Socket.io
|
||||
// Notify the SENDER(s) that their messages have been read.
|
||||
// In a 1-on-1 chat, this is just one person.
|
||||
if (result.rows.length > 0) {
|
||||
const uniqueSenders = [...new Set(result.rows.map(m => m.sender_id))];
|
||||
|
||||
// Real-time update via Socket.io to the sender (so they know it's read)
|
||||
const updatedMessage = result.rows[0];
|
||||
const senderId = updatedMessage.sender_id;
|
||||
const senderSocketId = getSocketId(senderId);
|
||||
|
||||
if (senderSocketId) {
|
||||
getIO().to(senderSocketId).emit("message_read", updatedMessage);
|
||||
for (const senderId of uniqueSenders) {
|
||||
const senderSocketId = getSocketId(senderId);
|
||||
if (senderSocketId) {
|
||||
getIO().to(senderSocketId).emit("conversation_read", {
|
||||
conversation_id: conversationId,
|
||||
read_by_user_id: userId,
|
||||
updated_count: result.rows.length // Simplification: total count, not per sender, but fine for 1:1
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
console.error("Error marking message as read:", err);
|
||||
console.error("Error marking conversation as read:", err);
|
||||
res.status(500).json({ error: "Internal server error" });
|
||||
}
|
||||
});
|
||||
|
|
@ -217,9 +251,6 @@ router.post("/communications", async (req, res) => {
|
|||
communication_type, call_status, duration_seconds || 0, call_recording_url
|
||||
]);
|
||||
|
||||
// Trigger updates conversation via DB trigger, but we might want to emit socket event?
|
||||
// For now, just return success.
|
||||
|
||||
await client.query("COMMIT");
|
||||
res.status(201).json(result.rows[0]);
|
||||
|
||||
|
|
|
|||
|
|
@ -167,7 +167,7 @@ router.get("/", async (req, res) => {
|
|||
// 2. GET /near-me (Spatial Search) - Optimized with idx_listings_spatial
|
||||
router.get("/near-me", async (req, res) => {
|
||||
try {
|
||||
const { lat, lng, radius_meters = 50000, limit = 20, offset = 0 } = req.query;
|
||||
const { lat, lng, radius_meters = 100000, limit = 20, offset = 0 } = req.query;
|
||||
|
||||
if (!lat || !lng) {
|
||||
return res.status(400).json({ error: "Latitude and Longitude are required" });
|
||||
|
|
@ -176,6 +176,7 @@ router.get("/near-me", async (req, res) => {
|
|||
const queryText = `
|
||||
SELECT * FROM listings
|
||||
WHERE deleted = FALSE
|
||||
AND status = 'active'
|
||||
AND ST_DWithin(filter_location_geog, ST_SetSRID(ST_MakePoint($1, $2), 4326)::geography, $3)
|
||||
ORDER BY filter_location_geog <-> ST_SetSRID(ST_MakePoint($1, $2), 4326)::geography
|
||||
LIMIT $4 OFFSET $5
|
||||
|
|
@ -360,7 +361,17 @@ router.get("/:id", async (req, res) => {
|
|||
const { id } = req.params;
|
||||
|
||||
const queryText = `
|
||||
SELECT l.*, row_to_json(a) as animal
|
||||
SELECT
|
||||
l.*,
|
||||
row_to_json(a) as animal,
|
||||
COALESCE(
|
||||
(
|
||||
SELECT json_agg(m ORDER BY m.is_primary DESC, m.sort_order ASC)
|
||||
FROM listing_media m
|
||||
WHERE m.listing_id = l.id AND m.deleted = FALSE
|
||||
),
|
||||
'[]'
|
||||
) as media
|
||||
FROM listings l
|
||||
JOIN animals a ON l.animal_id = a.id
|
||||
WHERE l.id = $1 AND l.deleted = FALSE
|
||||
|
|
@ -445,6 +456,35 @@ router.put("/:id", async (req, res) => {
|
|||
await client.query(updateAnimalQuery, animalParams);
|
||||
}
|
||||
|
||||
// 3. Update Media (if provided)
|
||||
// Check if media is provided (it can be an empty array if the user wants to remove all media)
|
||||
if (req.body.media && Array.isArray(req.body.media)) {
|
||||
const media = req.body.media;
|
||||
|
||||
// Soft delete existing media
|
||||
await client.query(
|
||||
"UPDATE listing_media SET deleted = TRUE WHERE listing_id = $1",
|
||||
[id]
|
||||
);
|
||||
|
||||
// Insert new media
|
||||
if (media.length > 0) {
|
||||
const mediaInsertQuery = `
|
||||
INSERT INTO listing_media (listing_id, media_url, media_type, is_primary, sort_order)
|
||||
VALUES ($1, $2, $3, $4, $5)
|
||||
`;
|
||||
for (const item of media) {
|
||||
await client.query(mediaInsertQuery, [
|
||||
id,
|
||||
item.media_url,
|
||||
item.media_type,
|
||||
item.is_primary || false,
|
||||
item.sort_order || 0
|
||||
]);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
await client.query("COMMIT");
|
||||
|
||||
// Fetch complete updated data
|
||||
|
|
@ -473,7 +513,7 @@ router.delete("/:id", async (req, res) => {
|
|||
|
||||
const queryText = `
|
||||
UPDATE listings
|
||||
SET deleted = TRUE, deleted_reason = $1
|
||||
SET deleted = TRUE, status = 'deleted', deleted_reason = $1
|
||||
WHERE id = $2
|
||||
RETURNING *
|
||||
`;
|
||||
|
|
@ -595,4 +635,55 @@ router.patch("/:id/score", async (req, res) => {
|
|||
}
|
||||
});
|
||||
|
||||
|
||||
// Add Media to Listing
// POST /:id/media — bulk-attach media rows to an existing (non-deleted)
// listing inside a single transaction. Expects a body of
// { media: [{ media_url, media_type, is_primary, sort_order }, ...] }.
router.post("/:id/media", async (req, res) => {
  const client = await pool.connect();
  try {
    const { id } = req.params;
    const { media } = req.body; // Array of { media_url, media_type, is_primary, sort_order }

    const hasMedia = Array.isArray(media) && media.length > 0;
    if (!hasMedia) {
      return res.status(400).json({ error: "Media array is required" });
    }

    await client.query("BEGIN");

    // Refuse to attach media to a missing or soft-deleted listing.
    const listingCheck = await client.query(
      "SELECT id FROM listings WHERE id = $1 AND deleted = FALSE",
      [id]
    );
    if (listingCheck.rows.length === 0) {
      await client.query("ROLLBACK");
      return res.status(404).json({ error: "Listing not found" });
    }

    const insertSql = `
      INSERT INTO listing_media (listing_id, media_url, media_type, is_primary, sort_order)
      VALUES ($1, $2, $3, $4, $5)
      RETURNING *
    `;

    // Inserts run sequentially on purpose: they share one transaction/client.
    const insertedMedia = [];
    for (const item of media) {
      const { rows } = await client.query(insertSql, [
        id,
        item.media_url,
        item.media_type,
        item.is_primary || false,
        item.sort_order || 0
      ]);
      insertedMedia.push(rows[0]);
    }

    await client.query("COMMIT");
    res.status(201).json(insertedMedia);
  } catch (err) {
    await client.query("ROLLBACK");
    console.error("Error adding media to listing:", err);
    res.status(500).json({ error: "Internal server error" });
  } finally {
    client.release();
  }
});
|
||||
|
||||
export default router;
|
||||
|
|
|
|||
|
|
@ -0,0 +1,78 @@
|
|||
|
||||
// utils/fcm.js
// This file handles Firebase Cloud Messaging (FCM) notifications.
// Note: You must provide a valid serviceAccountKey.json and install firebase-admin
// for this to strictly work. We include safety checks to prevent crashes if missing.

import admin from 'firebase-admin';
import { createRequire } from "module";
// createRequire gives this ES module a CommonJS-style require(), which is
// the simplest way to load a JSON credentials file synchronously at startup.
const require = createRequire(import.meta.url);

let serviceAccount = null;
try {
  // Attempt to load credentials. In production, these might be env vars.
  // For now, looking for a file in the project root or config folder.
  serviceAccount = require("../serviceAccountKey.json");
} catch (error) {
  // A missing key file is tolerated on purpose: the server keeps running
  // with push notifications disabled instead of crashing on boot.
  console.warn("FCM: serviceAccountKey.json not found. Push notifications will be disabled.");
}

// Flag consulted by sendPushNotification to decide whether sending is possible.
let isFcmInitialized = false;

if (serviceAccount) {
  try {
    admin.initializeApp({
      credential: admin.credential.cert(serviceAccount)
    });
    isFcmInitialized = true;
    console.log("FCM Initialized successfully.");
  } catch (error) {
    // Bad or expired credentials: log and leave push disabled.
    console.error("Error initializing FCM:", error);
  }
}
|
||||
|
||||
/**
 * Sends a push notification to specific device tokens.
 * No-ops silently when FCM is not initialized or no tokens are supplied,
 * so callers may invoke it unconditionally (best-effort delivery).
 * @param {string[]|string} tokens - FCM registration token(s)
 * @param {string} title - Notification title (falls back to "New Message")
 * @param {string} body - Notification body text (falls back to a default)
 * @param {object} data - Custom data payload (optional)
 * @returns {Promise<void>}
 */
export const sendPushNotification = async (tokens, title, body, data = {}) => {
  if (!isFcmInitialized || !tokens || tokens.length === 0) {
    return;
  }

  // Ensure tokens is an array (a single token string is also accepted).
  const validTokens = Array.isArray(tokens) ? tokens : [tokens];

  const message = {
    notification: {
      title: title || "New Message",
      body: body || "You have a new message",
    },
    data: {
      ...data,
      click_action: "FLUTTER_NOTIFICATION_CLICK" // Standard for many frameworks
    },
    tokens: validTokens,
  };

  try {
    // BUGFIX: firebase-admin v12+ removed messaging().sendMulticast()
    // (this project pins ^13.6.0, so the old call throws a TypeError).
    // sendEachForMulticast() is the supported replacement and returns the
    // same { successCount, failureCount, responses } shape.
    const response = await admin.messaging().sendEachForMulticast(message);
    console.log(`FCM sent: ${response.successCount} successes, ${response.failureCount} failures.`);

    if (response.failureCount > 0) {
      const failedTokens = [];
      response.responses.forEach((resp, idx) => {
        if (!resp.success) {
          failedTokens.push(validTokens[idx]);
          // Optional: Remove invalid tokens from DB here if error code is 'messaging/registration-token-not-registered'
        }
      });
      // console.log('Failed tokens:', failedTokens);
    }
  } catch (error) {
    // Delivery failures are logged, not rethrown: notifications are best-effort.
    console.error("Error sending FCM notification:", error);
  }
};
|
||||
Loading…
Reference in New Issue