---
## Índice

1. Visão Geral
2. Fontes de Dados
3. Exemplos de Código
4. APIs Backend
5. Armazenamento e Cache
6. Casos de Uso

---
O sistema integra 4 dimensões de dados:
---
#### CoinGecko API (Recomendado - Gratuito)
// Fetches market-chart data (prices, volumes, market caps) for a coin from CoinGecko.
// coinId: CoinGecko asset id (e.g. 'bitcoin'); days: history window in days.
// Returns { prices, volumes, marketCaps }, each an array of { timestamp, ... } objects.
async function fetchCoinGeckoData(coinId, days = 90) {
  // The original text lost the template-literal backticks; restored here.
  const url = `https://api.coingecko.com/api/v3/coins/${coinId}/market_chart?vs_currency=usd&days=${days}`;
  const response = await fetch(url);
  // Surface rate-limit / not-found errors instead of failing later on missing fields.
  if (!response.ok) {
    throw new Error(`CoinGecko request failed: ${response.status}`);
  }
  const data = await response.json();
  return {
    prices: data.prices.map(([ts, price]) => ({ timestamp: ts, price })),
    volumes: data.total_volumes.map(([ts, vol]) => ({ timestamp: ts, volume: vol })),
    marketCaps: data.market_caps.map(([ts, cap]) => ({ timestamp: ts, marketCap: cap }))
  };
}
// Exemplo de uso
const bitcoinData = await fetchCoinGeckoData('bitcoin', 90);
Rate Limits: 50 calls/min (free), 500 calls/min (pro)
#### Alpha Vantage (Stocks & Crypto)
// Requires a free API key: https://www.alphavantage.co/support/#api-key
// Fetches daily OHLCV candles for a symbol. Returns an array of
// { date, open, high, low, close, volume } with numeric fields.
async function fetchAlphaVantage(symbol, apiKey) {
  const url = `https://www.alphavantage.co/query?function=TIME_SERIES_DAILY&symbol=${symbol}&apikey=${apiKey}`;
  const response = await fetch(url);
  const data = await response.json();
  const series = data['Time Series (Daily)'];
  // Alpha Vantage reports rate limits and bad symbols as HTTP 200 responses
  // that simply lack the series key — fail loudly instead of crashing on undefined.
  if (!series) {
    throw new Error(data.Note || data['Error Message'] || 'Alpha Vantage: unexpected response');
  }
  return Object.entries(series).map(([date, values]) => ({
    date,
    open: parseFloat(values['1. open']),
    high: parseFloat(values['2. high']),
    low: parseFloat(values['3. low']),
    close: parseFloat(values['4. close']),
    volume: parseInt(values['5. volume'], 10) // explicit radix
  }));
}
Rate Limits: 5 calls/min (free), 75 calls/min (premium)
---
#### GitHub API
// Unauthenticated: 60 calls/hour.
// With a token: 5000 calls/hour.
// Fetches repository popularity metrics plus yearly commit activity.
async function fetchGitHubMetrics(owner, repo, token = null) {
  const headers = token ? { 'Authorization': `token ${token}` } : {};
  // The two requests are independent — run them in parallel.
  const [repoResponse, statsResponse] = await Promise.all([
    fetch(`https://api.github.com/repos/${owner}/${repo}`, { headers }),
    fetch(`https://api.github.com/repos/${owner}/${repo}/stats/commit_activity`, { headers })
  ]);
  const repoData = await repoResponse.json();
  const statsData = await statsResponse.json();
  return {
    stars: repoData.stargazers_count,
    forks: repoData.forks_count,
    watchers: repoData.watchers_count,
    openIssues: repoData.open_issues_count,
    size: repoData.size,
    createdAt: repoData.created_at,
    updatedAt: repoData.updated_at,
    // The stats endpoint may answer 202 with an empty body while GitHub
    // computes statistics — guard the reduce against a non-array payload.
    weeklyCommits: Array.isArray(statsData)
      ? statsData.reduce((sum, week) => sum + week.total, 0)
      : 0
  };
}
// Gerar token: https://github.com/settings/tokens
const reactMetrics = await fetchGitHubMetrics('facebook', 'react', 'ghp_yourtoken');
#### NPM Registry
// Point download totals for an npm package over a named period
// (e.g. 'last-week', 'last-month', 'last-year').
async function fetchNPMDownloads(packageName, period = 'last-year') {
  const url = `https://api.npmjs.org/downloads/point/${period}/${packageName}`;
  const response = await fetch(url);
  const data = await response.json();
  // Derive the day count from the reported start/end range instead of
  // hard-coding 365, so periods other than 'last-year' average correctly.
  const days = Math.round((new Date(data.end) - new Date(data.start)) / 86400000) + 1;
  return {
    downloads: data.downloads,
    package: data.package,
    start: data.start,
    end: data.end,
    dailyAverage: data.downloads / days
  };
}
// Detailed history: per-day download counts between two YYYY-MM-DD dates.
async function fetchNPMHistory(packageName, startDate, endDate) {
  const url = `https://api.npmjs.org/downloads/range/${startDate}:${endDate}/${packageName}`;
  const response = await fetch(url);
  const data = await response.json();
  // The registry returns { downloads: [{ day, downloads }, ...] }.
  return data.downloads.map(day => ({
    date: day.day,
    downloads: day.downloads
  }));
}
const reactDownloads = await fetchNPMDownloads('react');
Rate Limits: Nenhum oficial, uso razoável
#### PyPI Stats (Python)
// Recent download counts for a PyPI package via pypistats.org.
async function fetchPyPIStats(packageName) {
  const url = `https://pypistats.org/api/packages/${packageName}/recent`;
  const response = await fetch(url);
  const data = await response.json();
  // Payload shape: { data: { last_day, last_week, last_month } }.
  return {
    lastDay: data.data.last_day,
    lastWeek: data.data.last_week,
    lastMonth: data.data.last_month
  };
}
---
#### Reddit API
// Derives a simple sentiment signal from a subreddit's hot listing.
// sentiment = mean(upvote_ratio - 0.5): positive when posts are well received.
async function fetchRedditSentiment(subreddit, limit = 100) {
  const url = `https://www.reddit.com/r/${subreddit}/hot.json?limit=${limit}`;
  const response = await fetch(url);
  const data = await response.json();
  const posts = data.data.children.map(child => child.data);
  // Guard against an empty listing: the averages below would otherwise be NaN.
  if (posts.length === 0) {
    return { totalPosts: 0, avgScore: 0, avgComments: 0, avgUpvoteRatio: 0, sentiment: 0, topPosts: [] };
  }
  const mean = (pick) => posts.reduce((sum, p) => sum + pick(p), 0) / posts.length;
  return {
    totalPosts: posts.length,
    avgScore: mean(p => p.score),
    avgComments: mean(p => p.num_comments),
    avgUpvoteRatio: mean(p => p.upvote_ratio),
    sentiment: mean(p => p.upvote_ratio - 0.5),
    topPosts: posts.slice(0, 10).map(p => ({
      title: p.title,
      score: p.score,
      comments: p.num_comments,
      upvoteRatio: p.upvote_ratio
    }))
  };
}
const bitcoinSentiment = await fetchRedditSentiment('Bitcoin', 100);
#### Twitter API v2 (Requer Aprovação)
// Requires a Bearer Token from a Twitter Developer account.
// Searches recent tweets matching `query` (max 100 results per call).
async function fetchTwitterMentions(query, bearerToken) {
  const url = `https://api.twitter.com/2/tweets/search/recent?query=${encodeURIComponent(query)}&max_results=100`;
  const response = await fetch(url, {
    headers: {
      'Authorization': `Bearer ${bearerToken}`
    }
  });
  const data = await response.json();
  return {
    tweetCount: data.meta.result_count,
    tweets: data.data
  };
}
#### Google Trends (via biblioteca)
npm install google-trends-api
const googleTrends = require('google-trends-api');

// Returns interest-over-time points for a keyword between two dates:
// an array of { time, value } where value is the raw trend score.
async function fetchGoogleTrends(keyword, startTime, endTime) {
  const raw = await googleTrends.interestOverTime({ keyword, startTime, endTime });
  // The library returns a JSON string, not an object.
  const parsed = JSON.parse(raw);
  return parsed.default.timelineData.map(({ formattedTime, value }) => ({
    time: formattedTime,
    value: value[0]
  }));
}
// Exemplo
const trends = await fetchGoogleTrends(
'bitcoin',
new Date('2024-01-01'),
new Date('2024-12-01')
);
---
#### Etherscan (Ethereum)
// Fetches Ethereum supply and daily transaction counts from Etherscan.
// startDate/endDate generalize the previously hard-coded 2024 range
// (defaults keep the original behavior).
async function fetchEthereumMetrics(apiKey, startDate = '2024-01-01', endDate = '2024-12-31') {
  const baseUrl = 'https://api.etherscan.io/api';
  // Total ETH supply.
  const supplyResponse = await fetch(
    `${baseUrl}?module=stats&action=ethsupply&apikey=${apiKey}`
  );
  const supplyData = await supplyResponse.json();
  // Daily transaction counts over the requested window.
  const txResponse = await fetch(
    `${baseUrl}?module=stats&action=dailytx&startdate=${startDate}&enddate=${endDate}&apikey=${apiKey}`
  );
  const txData = await txResponse.json();
  return {
    totalSupply: supplyData.result,
    dailyTransactions: txData.result
  };
}
// API Key gratuita: https://etherscan.io/apis
#### Blockchain.com API (Bitcoin)
// Bitcoin network health metrics from blockchain.info — no API key required.
async function fetchBitcoinNetworkMetrics() {
  const asText = (url) => fetch(url).then((resp) => resp.text());
  const asJson = (url) => fetch(url).then((resp) => resp.json());
  // The three endpoints are independent; query them concurrently.
  const [hashRate, difficulty, activeAddresses] = await Promise.all([
    asText('https://blockchain.info/q/hashrate'),
    asText('https://blockchain.info/q/getdifficulty'),
    asJson('https://api.blockchain.info/charts/n-unique-addresses?timespan=30days&format=json')
  ]);
  return {
    hashRate: parseFloat(hashRate),
    difficulty: parseFloat(difficulty),
    // Chart points arrive as { x: timestamp, y: count }.
    activeAddresses: activeAddresses.values.map(({ x, y }) => ({
      timestamp: x,
      count: y
    }))
  };
}
#### Messari API
// Market and on-chain metrics for an asset from Messari.
// on_chain_data may be absent for some assets, hence the optional chaining.
async function fetchMessariMetrics(asset) {
  const url = `https://data.messari.io/api/v1/assets/${asset}/metrics`;
  const response = await fetch(url);
  const data = await response.json();
  return {
    marketCap: data.data.marketcap.current_marketcap_usd,
    volume24h: data.data.market_data.volume_last_24_hours,
    realVolume24h: data.data.market_data.real_volume_last_24_hours,
    activeAddresses: data.data.on_chain_data?.active_addresses,
    transactionCount: data.data.on_chain_data?.transaction_count
  };
}
---
// server.js — Express API that aggregates all data sources for one asset.
const express = require('express');
const cors = require('cors');
const NodeCache = require('node-cache');

const app = express();
const cache = new NodeCache({ stdTTL: 3600 }); // 1-hour TTL

app.use(cors());
app.use(express.json());

// Main endpoint: computes (or serves cached) bubble metrics for an asset.
app.get('/api/bubble-analysis/:asset', async (req, res) => {
  const { asset } = req.params;
  const cacheKey = `bubble-${asset}`;
  // Serve from cache when possible to respect upstream rate limits.
  const cached = cache.get(cacheKey);
  if (cached) {
    return res.json({ ...cached, cached: true });
  }
  try {
    // Fetch the independent sources in parallel.
    const [marketData, githubData, redditData] = await Promise.all([
      fetchCoinGeckoData(asset, 90),
      fetchGitHubMetrics('bitcoin', 'bitcoin'),
      fetchRedditSentiment('Bitcoin', 100)
    ]);
    const metrics = calculateBubbleMetrics({
      market: marketData,
      github: githubData,
      reddit: redditData
    });
    cache.set(cacheKey, metrics);
    res.json(metrics);
  } catch (error) {
    res.status(500).json({ error: error.message });
  }
});

app.listen(3000, () => console.log('API rodando na porta 3000'));
// bubbleCalculator.js
// Aggregates the five dimensions into a single bubble report for one asset.
function calculateBubbleMetrics(data) {
  const { market, github, reddit } = data;

  const adoption = calculateAdoption(github, market);   // 1. real adoption
  const hype = calculateHype(reddit, market);           // 2. social hype
  const investment = calculateInvestment(market);       // 3. capital inflow
  const network = calculateNetworkEffects(adoption);    // 4. network effects
  const feedback = calculateFeedback(adoption, hype, investment, network); // 5. loops

  // 6. composite bubble index
  const bubbleIndex = calculateBubbleIndex({ adoption, hype, investment, network, feedback });

  return {
    adoption,
    hype,
    investment,
    network,
    feedback,
    bubbleIndex,
    // Key signal: how far hype has diverged from real adoption.
    divergence: Math.abs(hype - adoption),
    risk: classifyRisk(bubbleIndex),
    timestamp: Date.now()
  };
}
// Adoption score in [0, 1]: GitHub activity (≤40%) + normalized volume (30%)
// + active-address placeholder (15%).
function calculateAdoption(github, market) {
  // GitHub signal, capped at 40% of the total.
  const repoSignal =
    (github.stars / 50000) * 0.4 +
    (github.forks / 10000) * 0.3 +
    (github.weeklyCommits / 500) * 0.3;
  const githubScore = Math.min(repoSignal, 0.4);

  // Average of the last 30 volume points relative to the all-time max: 30%.
  const last30 = market.volumes.slice(-30);
  let volumeSum = 0;
  for (const entry of last30) volumeSum += entry.volume;
  const avgVolume = volumeSum / last30.length;
  const maxVolume = Math.max(...market.volumes.map((v) => v.volume));
  const volumeScore = (avgVolume / maxVolume) * 0.3;

  // Active addresses would fill the remaining 30%; fixed placeholder for now.
  const addressScore = 0.15;

  return Math.min(githubScore + volumeScore + addressScore, 1);
}
// Hype score in [0, 1]: sentiment (40%) + engagement (≤30%) + price velocity (30%).
function calculateHype(reddit, market) {
  // Sentiment shifted from [-0.5, 0.5] into [0, 1], weighted 40%.
  const sentimentScore = (reddit.sentiment + 0.5) * 0.4;

  // Engagement from average score and comment counts, capped at 30%.
  const engagementScore = Math.min(
    (reddit.avgScore / 1000) * 0.15 + (reddit.avgComments / 100) * 0.15,
    0.3
  );

  // Price velocity: last 30 points vs. the window 60-90 points back.
  const mean = (points) => points.reduce((acc, p) => acc + p.price, 0) / points.length;
  const recentAvg = mean(market.prices.slice(-30));
  const olderAvg = mean(market.prices.slice(-90, -60));
  const priceVelocity = Math.min((recentAvg - olderAvg) / olderAvg, 1);
  // Only upward moves contribute to hype.
  const velocityScore = Math.max(priceVelocity, 0) * 0.3;

  return Math.min(sentimentScore + engagementScore + velocityScore, 1);
}
// Investment score in [0, 1]: normalized average volume scaled by
// short-term momentum (last 7 points vs. the overall average).
function calculateInvestment(market) {
  const volumes = market.volumes.map((v) => v.volume);
  let total = 0;
  for (const vol of volumes) total += vol;
  const avgVolume = total / volumes.length;
  const maxVolume = Math.max(...volumes);

  let weekTotal = 0;
  for (const vol of volumes.slice(-7)) weekTotal += vol;
  const momentum = weekTotal / 7 / avgVolume;

  return Math.min((avgVolume / maxVolume) * momentum, 1);
}
// Modified Metcalfe's Law: value scales as adoption^1.5 rather than n^2.
function calculateNetworkEffects(adoption) {
  return adoption ** 1.5;
}
// Net feedback: reinforcing loops between dimensions minus a saturation term.
function calculateFeedback(adoption, hype, investment, network) {
  // Reinforcing loops: hype→investment, network→adoption, investment→network.
  const reinforcing =
    hype * investment * 0.4 +
    network * adoption * 0.3 +
    investment * network * 0.3;
  const positive = 0.3 * reinforcing;
  // Dampening once both adoption and hype are high (saturation).
  const negative = -0.1 * Math.pow(adoption, 2) * Math.pow(hype, 2);
  return positive + negative;
}
// Composite bubble index in [0, 1] from the five dimension scores.
function calculateBubbleIndex({ adoption, hype, investment, network, feedback }) {
  // Hype running ahead of adoption is penalized harder (×1.5) than lagging (×0.5).
  const asymmetry = hype > adoption ? 1.5 : 0.5;
  const divergence = Math.abs(hype - adoption) * asymmetry;

  // Network effects are squared once past the 0.5 threshold.
  const networkTerm = network > 0.5 ? Math.pow(network, 2) : network;

  const index =
    0.25 * divergence +
    0.30 * Math.pow(investment, 1.5) +
    0.20 * networkTerm +
    0.25 * Math.abs(feedback);

  return Math.min(index * 1.5, 1);
}
// Maps a bubble index to a labelled risk band with a display color.
function classifyRisk(bubbleIndex) {
  const bands = [
    [0.3, { level: 'Baixo', color: '#10b981' }],
    [0.5, { level: 'Moderado', color: '#f59e0b' }],
    [0.7, { level: 'Alto', color: '#ef4444' }]
  ];
  for (const [upperBound, risk] of bands) {
    if (bubbleIndex < upperBound) return risk;
  }
  return { level: 'Crítico', color: '#991b1b' };
}
module.exports = { calculateBubbleMetrics };
---
const redis = require('redis');
const client = redis.createClient();
// node-redis v4 (which provides setEx) requires an explicit connection
// before any command — without this every call below rejects.
await client.connect();

// Returns the parsed cached value, or null on a cache miss.
async function getCachedData(key) {
  const cached = await client.get(key);
  return cached ? JSON.parse(cached) : null;
}

// Stores `data` as JSON with a TTL in seconds (default: 1 hour).
async function setCachedData(key, data, ttl = 3600) {
  await client.setEx(key, ttl, JSON.stringify(data));
}

// Uso
const data = await getCachedData('bitcoin-metrics');
if (!data) {
  const fresh = await fetchAllData('bitcoin');
  await setCachedData('bitcoin-metrics', fresh, 3600);
}
-- Schema for storing the history of computed bubble metrics, one row per run.
CREATE TABLE bubble_metrics (
id SERIAL PRIMARY KEY,
asset VARCHAR(50) NOT NULL, -- asset identifier, e.g. 'bitcoin'
timestamp BIGINT NOT NULL, -- epoch milliseconds (compared against Date.now() by readers)
adoption FLOAT,
hype FLOAT,
investment FLOAT,
network FLOAT,
feedback FLOAT,
bubble_index FLOAT,
risk_level VARCHAR(20), -- label such as 'Baixo' / 'Moderado' / 'Alto' / 'Crítico'
raw_data JSONB, -- full metrics payload, kept for auditing/reprocessing
created_at TIMESTAMP DEFAULT NOW()
);
-- Supports per-asset, time-descending range queries (see getHistoricalMetrics).
CREATE INDEX idx_asset_timestamp ON bubble_metrics(asset, timestamp DESC);
const { Pool } = require('pg');
const pool = new Pool({ connectionString: process.env.DATABASE_URL });

// Persists one metrics snapshot; raw_data keeps the full payload as JSONB.
// Parameterized query — values are never interpolated into the SQL string.
async function saveMetrics(asset, metrics) {
  await pool.query(
    `INSERT INTO bubble_metrics
     (asset, timestamp, adoption, hype, investment, network, feedback, bubble_index, risk_level, raw_data)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)`,
    [
      asset,
      metrics.timestamp,
      metrics.adoption,
      metrics.hype,
      metrics.investment,
      metrics.network,
      metrics.feedback,
      metrics.bubbleIndex,
      metrics.risk.level,
      JSON.stringify(metrics)
    ]
  );
}

// Returns rows for `asset` newer than `days` days ago, newest first.
async function getHistoricalMetrics(asset, days = 90) {
  const result = await pool.query(
    `SELECT * FROM bubble_metrics
     WHERE asset = $1
       AND timestamp > $2
     ORDER BY timestamp DESC`,
    [asset, Date.now() - (days * 24 * 60 * 60 * 1000)]
  );
  return result.rows;
}
---
// Caso 1: criptomoeda (Bitcoin).
// Each config gets its own name — the original redeclared `const config`
// and `const analysis` three times in one scope, a SyntaxError.
const bitcoinConfig = {
  crypto: 'bitcoin',
  github: { owner: 'bitcoin', repo: 'bitcoin' },
  reddit: 'Bitcoin',
  blockchain: 'btc'
};
const bitcoinAnalysis = await analyzeAsset(bitcoinConfig);

// Caso 2: biblioteca open source (React).
const reactConfig = {
  github: { owner: 'facebook', repo: 'react' },
  npm: 'react',
  reddit: 'reactjs'
};
const reactAnalysis = await analyzeAsset(reactConfig);

// Caso 3: tecnologia de IA.
const aiConfig = {
  github: { owner: 'openai', repo: 'gpt-3' },
  reddit: 'artificial',
  googleTrends: 'GPT',
  twitter: 'ChatGPT'
};
const aiAnalysis = await analyzeAsset(aiConfig);
---
---
---
1. **Rate Limits**: Sempre respeite os limites de cada API
2. **Cache**: Use cache agressivamente para reduzir chamadas
3. **Custos**: APIs gratuitas têm limites — planeje o escalonamento
4. **Dados Históricos**: Armazene métricas para análise temporal
5. **Normalização**: Diferentes escalas requerem normalização cuidadosa
6. **Privacidade**: Não armazene dados sensíveis dos usuários