Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions src/mcp/local-tools.js
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ import {
scrapeTweets,
searchTweets,
scrapeThread,
scrapeLikedTweets,
scrapeLikes,
scrapeMedia,
scrapeListMembers,
Expand Down Expand Up @@ -630,6 +631,11 @@ export async function x_get_bookmarks({ limit = 100 }) {
return scrapeBookmarks(pg, { limit });
}

/**
 * Scrape tweets a user has liked, via the shared browser session.
 *
 * @param {object} args
 * @param {string} args.username - Handle (without @) whose likes to fetch.
 * @param {number} [args.limit=50] - Maximum number of liked tweets to return.
 * @param {string} [args.from] - Lower date bound passed through to the scraper.
 * @param {string} [args.to] - Upper date bound passed through to the scraper.
 * @returns {Promise<*>} Whatever scrapeLikedTweets resolves with.
 */
export async function x_get_likes({ username, limit = 50, from, to }) {
  // Reuse (or lazily start) the singleton browser page.
  const browser = await ensureBrowser();
  const options = { limit, from, to };
  return scrapeLikedTweets(browser.page, username, options);
}

export async function x_clear_bookmarks() {
const { page: pg } = await ensureBrowser();
await pg.goto('https://x.com/i/bookmarks', { waitUntil: 'networkidle2' });
Expand Down Expand Up @@ -1369,6 +1375,7 @@ export const toolMap = {
x_reply,
x_bookmark,
x_get_bookmarks,
x_get_likes,
x_clear_bookmarks,
x_auto_like,
// Discovery
Expand Down
22 changes: 4 additions & 18 deletions src/mcp/server.js
Original file line number Diff line number Diff line change
Expand Up @@ -1903,12 +1903,14 @@ const TOOLS = [
},
{
name: 'x_get_likes',
description: 'Scrape tweets that a user has liked. Shows what content a user engages with.',
description: 'Scrape liked tweets via GraphQL API with rich data. Writes results to a JSONL file. Supports timestamp filtering — stops early when passing the "from" date.',
inputSchema: {
type: 'object',
properties: {
username: { type: 'string', description: 'Username (without @)' },
limit: { type: 'number', description: 'Maximum liked tweets (default: 50)' },
from: { type: 'string', description: 'Only include likes from this date onward (e.g. "2026-03-01")' },
to: { type: 'string', description: 'Only include likes up to this date (e.g. "2026-03-31")' },
},
required: ['username'],
},
Expand Down Expand Up @@ -2275,7 +2277,7 @@ async function executeTool(name, args) {
const xeepyTools = [
'x_get_replies', 'x_get_hashtag', 'x_get_likers', 'x_get_retweeters',
'x_get_media', 'x_get_recommendations', 'x_get_mentions', 'x_get_quote_tweets',
'x_get_likes', 'x_auto_follow', 'x_follow_engagers', 'x_unfollow_all',
'x_auto_follow', 'x_follow_engagers', 'x_unfollow_all',
'x_smart_unfollow', 'x_quote_tweet', 'x_auto_comment', 'x_auto_retweet',
'x_detect_bots', 'x_find_influencers', 'x_smart_target', 'x_crypto_analyze',
'x_grok_analyze_image', 'x_audience_insights', 'x_engagement_report',
Expand Down Expand Up @@ -2475,22 +2477,6 @@ async function executeXeepyTool(name, args) {
return { quotes, count: quotes.length };
}

case 'x_get_likes': {
const page = await localTools.getPage();
await page.goto(`https://x.com/${args.username}/likes`, { waitUntil: 'networkidle2', timeout: 30000 });
await new Promise(r => setTimeout(r, 3000));
const likedTweets = await page.evaluate((limit) => {
const articles = document.querySelectorAll('article[data-testid="tweet"]');
return Array.from(articles).slice(0, limit).map(el => {
const textEl = el.querySelector('[data-testid="tweetText"]');
const userEl = el.querySelector('[data-testid="User-Name"]');
const timeEl = el.querySelector('time');
return { text: textEl?.textContent || '', author: userEl?.textContent || '', timestamp: timeEl?.getAttribute('datetime') || '' };
});
}, args.limit || 50);
return { likedTweets, count: likedTweets.length, username: args.username };
}

// ── Follow Automation ──
case 'x_auto_follow': {
// Find users via search, then follow them with delays
Expand Down
2 changes: 2 additions & 0 deletions src/scrapers/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,7 @@ export const {
scrapeTweets,
searchTweets,
scrapeThread,
scrapeLikedTweets,
scrapeLikes,
scrapeHashtag,
scrapeMedia,
Expand Down Expand Up @@ -308,6 +309,7 @@ export default {
scrapeTweets,
searchTweets,
scrapeThread,
scrapeLikedTweets,
scrapeLikes,
scrapeHashtag,
scrapeMedia,
Expand Down
Loading