Example #1
void feed::add_item(feed_item const& item)
{
	// don't add duplicates
	if (m_urls.find(item.url) != m_urls.end())
		return;

	m_urls.insert(item.url);
	m_items.push_back(item);

	feed_item& i = m_items.back();

	if (m_settings.auto_map_handles)
		i.handle = torrent_handle(m_ses.find_torrent(i.uuid.empty() ? i.url : i.uuid));

	if (m_ses.m_alerts.should_post<rss_item_alert>())
		m_ses.m_alerts.post_alert(rss_item_alert(my_handle(), i));

	if (m_settings.auto_download)
	{
		if (!m_settings.auto_map_handles)
			i.handle = torrent_handle(m_ses.find_torrent(i.uuid.empty() ? i.url : i.uuid));

		// if we're already downloading this torrent
		// there's nothing more to do
		if (i.handle.is_valid()) return;

		// has this already been added?
		if (m_added.find(i.url) != m_added.end()) return;

		// this means we should add this torrent to the session
		add_torrent_params p = m_settings.add_args;
		p.url = i.url;
		p.uuid = i.uuid;
		p.source_feed_url = m_settings.url;
		p.ti.reset();
		p.info_hash.clear();
		p.name = i.title.c_str();

		error_code e;
		m_ses.add_torrent(p, e);
		time_t now = time(NULL);
		m_added.insert(make_pair(i.url, now));
	}
}
Example #2
int main(int argc, char* argv[])
{

	My_Timer_Handler my_handle(2,1);

	while (true)
	{
		if (stop_event_loop)
		{
			std::cout << "stopping the event loop" << std::endl;
			break;
		}
		ACE_Reactor::instance()->handle_events();
	}

	// this approach cannot handle the shutdown case: calling exit() from the
	// handler would not run the destructors
	// ACE_Reactor::instance()->run_reactor_event_loop();

	return 0;
}
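
The main() above relies on two names the snippet never defines: the global stop_event_loop flag and the My_Timer_Handler class. Below is a minimal sketch of what they might look like, assuming My_Timer_Handler is an ACE_Event_Handler that schedules a repeating timer on the singleton reactor in its constructor and raises the flag after a few ticks; the tick limit and the member names are illustrative assumptions, not taken from the original program.

#include <iostream>
#include "ace/Event_Handler.h"
#include "ace/Reactor.h"
#include "ace/Time_Value.h"

// assumed definition: a plain bool is enough for a single-threaded reactor loop
bool stop_event_loop = false;

// assumed shape of My_Timer_Handler: schedule a repeating timer in the
// constructor, cancel it in the destructor, and ask main() to leave its loop
// after a handful of ticks instead of calling exit()
class My_Timer_Handler : public ACE_Event_Handler
{
public:
	My_Timer_Handler(int delay, int interval)
		: m_ticks(0)
	{
		ACE_Reactor::instance()->schedule_timer(this, 0
			, ACE_Time_Value(delay), ACE_Time_Value(interval));
	}

	~My_Timer_Handler()
	{
		ACE_Reactor::instance()->cancel_timer(this);
	}

	int handle_timeout(ACE_Time_Value const&, void const*)
	{
		std::cout << "tick " << ++m_ticks << std::endl;
		// setting the flag (rather than calling exit()) lets main() fall out
		// of its loop, so this handler's destructor still runs
		if (m_ticks >= 5) stop_event_loop = true;
		return 0;
	}

private:
	int m_ticks;
};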
Example #3
// returns the number of seconds until trying again
int feed::update_feed()
{
	if (m_updating) return 60;

	m_last_attempt = time(0);
	m_last_update = 0;

	if (m_ses.m_alerts.should_post<rss_alert>())
	{
		m_ses.m_alerts.post_alert(rss_alert(my_handle(), m_settings.url
			, rss_alert::state_updating, error_code()));
	}

	boost::shared_ptr<http_connection> feed(
		new http_connection(m_ses.m_io_service, m_ses.m_half_open
			, boost::bind(&feed::on_feed, shared_from_this()
			, _1, _2, _3, _4)));

	m_updating = true;
	feed->get(m_settings.url, seconds(30), 0, 0, 5, m_ses.m_settings.user_agent);

	return 60 + m_failures * m_failures * 60;
}
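
The return value implements a quadratic back-off: with no failures the feed is polled again after 60 seconds, and each consecutive failure pushes the next attempt further out (60, 120, 300, 600 seconds, and so on). A standalone sketch of the same arithmetic, with the constants taken from the snippet above and a made-up function name:

#include <cstdio>

// retry interval used by feed::update_feed(): one minute plus a quadratic
// penalty per consecutive failure
int retry_seconds(int failures)
{
	return 60 + failures * failures * 60;
}

int main()
{
	// prints 60, 120, 300, 600
	for (int f = 0; f < 4; ++f)
		std::printf("failures=%d -> retry in %d s\n", f, retry_seconds(f));
}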
Example #4
void feed::on_feed(error_code const& ec
	, http_parser const& parser, char const* data, int size)
{
	// enabling this assert makes the unit test a lot more difficult
//	TORRENT_ASSERT(m_updating);
	m_updating = false;

	if (ec && ec != asio::error::eof)
	{
		++m_failures;
		m_error = ec;
		if (m_ses.m_alerts.should_post<rss_alert>())
		{
			m_ses.m_alerts.post_alert(rss_alert(my_handle(), m_settings.url
				, rss_alert::state_error, m_error));
		}
		return;
	}

	if (parser.status_code() != 200)
	{
		++m_failures;
		m_error = error_code(parser.status_code(), get_http_category());
		if (m_ses.m_alerts.should_post<rss_alert>())
		{
			m_ses.m_alerts.post_alert(rss_alert(my_handle(), m_settings.url
				, rss_alert::state_error, m_error));
		}
		return;
	}

	m_failures = 0;

	char* buf = const_cast<char*>(data);

	feed_state s(*this);
	xml_parse(buf, buf + size, boost::bind(&parse_feed, boost::ref(s), _1, _2, _3));

	time_t now = time(NULL);

	// keep history of the typical feed size times 5
	int max_history = (std::max)(s.num_items * 5, 100);

	// this is not very efficient, but that's probably OK for now
	while (int(m_added.size()) > max_history)
	{
		// loop over all elements and find the one with the lowest timestamp
		// i.e. it was added the longest ago, then remove it
		std::map<std::string, time_t>::iterator i = std::min_element(
			m_added.begin(), m_added.end()
			, boost::bind(&std::pair<const std::string, time_t>::second, _1)
			< boost::bind(&std::pair<const std::string, time_t>::second, _2));
		m_added.erase(i);
	}

	m_last_update = now;

	// report that we successfully updated the feed
	if (m_ses.m_alerts.should_post<rss_alert>())
	{
		m_ses.m_alerts.post_alert(rss_alert(my_handle(), m_settings.url
			, rss_alert::state_updated, error_code()));
	}

	// update m_ses.m_next_rss_update timestamps
	// now that we have updated our timestamp
	m_ses.update_rss_feeds();
}
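
The history-pruning loop above runs std::min_element once per erased entry, which the comment itself concedes is not very efficient (quadratic in the number of entries removed). Purely as an illustration, and not part of libtorrent, here is one way the same trim could be done with a single std::nth_element pass; prune_added and its parameters are hypothetical names:

#include <algorithm>
#include <ctime>
#include <map>
#include <string>
#include <vector>

// drop the oldest entries until at most max_history remain, using nth_element
// to find the timestamp cut-off instead of repeated min_element scans
void prune_added(std::map<std::string, time_t>& added, int max_history)
{
	int excess = int(added.size()) - max_history;
	if (excess <= 0) return;

	std::vector<time_t> stamps;
	stamps.reserve(added.size());
	for (std::map<std::string, time_t>::const_iterator i = added.begin()
		, end(added.end()); i != end; ++i)
		stamps.push_back(i->second);

	// after this call stamps[excess - 1] holds the newest timestamp among the
	// 'excess' oldest entries
	std::nth_element(stamps.begin(), stamps.begin() + (excess - 1), stamps.end());
	time_t cutoff = stamps[excess - 1];

	for (std::map<std::string, time_t>::iterator i = added.begin();
		i != added.end() && excess > 0;)
	{
		if (i->second <= cutoff) { added.erase(i++); --excess; }
		else ++i;
	}
}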
Example #5
void feed::on_feed(error_code const& ec
	, http_parser const& parser, char const* data, int size)
{
	// enabling this assert makes the unit test a lot more difficult
//	TORRENT_ASSERT(m_updating);
	m_updating = false;

	if (ec && ec != asio::error::eof)
	{
		++m_failures;
		m_error = ec;
		if (m_ses.m_alerts.should_post<rss_alert>())
		{
			m_ses.m_alerts.post_alert(rss_alert(my_handle(), m_settings.url
				, rss_alert::state_error, m_error));
		}
		return;
	}

	if (parser.status_code() != 200)
	{
		++m_failures;
		m_error = error_code(parser.status_code(), get_http_category());
		if (m_ses.m_alerts.should_post<rss_alert>())
		{
			m_ses.m_alerts.post_alert(rss_alert(my_handle(), m_settings.url
				, rss_alert::state_error, m_error));
		}
		return;
	}

	m_failures = 0;

	char* buf = const_cast<char*>(data);

	feed_state s(*this);
	xml_parse(buf, buf + size, boost::bind(&parse_feed, boost::ref(s), _1, _2, _3));

	time_t now = time(NULL);

	if (m_settings.auto_download || m_settings.auto_map_handles)
	{
		for (std::vector<feed_item>::iterator i = m_items.begin()
			, end(m_items.end()); i != end; ++i)
		{
			i->handle = torrent_handle(m_ses.find_torrent(i->uuid.empty() ? i->url : i->uuid));

			// if we're already downloading this torrent, or if we
			// don't have auto-download enabled, just move along to
			// the next one
			if (i->handle.is_valid() || !m_settings.auto_download) continue;

			// has this already been added?
			if (m_added.find(i->url) != m_added.end()) continue;

			// this means we should add this torrent to the session
			add_torrent_params p = m_settings.add_args;
			p.url = i->url;
			p.uuid = i->uuid;
			p.source_feed_url = m_settings.url;
			p.ti.reset();
			p.info_hash.clear();
			p.name = i->title.c_str();

			error_code e;
			torrent_handle h = m_ses.add_torrent(p, e);
			m_ses.m_alerts.post_alert(add_torrent_alert(h, p, e));
			m_added.insert(make_pair(i->url, now));
		}
	}

	m_last_update = now;

	// keep history of the typical feed size times 5
	int max_history = (std::max)(s.num_items * 5, 100);

	// this is not very efficient, but that's probably OK for now
	while (int(m_added.size()) > max_history)
	{
		// loop over all elements and find the one with the lowest timestamp
		// i.e. it was added the longest ago, then remove it
		std::map<std::string, time_t>::iterator i = std::min_element(
			m_added.begin(), m_added.end()
			, boost::bind(&std::pair<const std::string, time_t>::second, _1)
			< boost::bind(&std::pair<const std::string, time_t>::second, _2));
		m_added.erase(i);
	}

	// report that we successfully updated the feed
	if (m_ses.m_alerts.should_post<rss_alert>())
	{
		m_ses.m_alerts.post_alert(rss_alert(my_handle(), m_settings.url
			, rss_alert::state_updated, error_code()));
	}

	// update m_ses.m_next_rss_update timestamps
	// now that we have updated our timestamp
	m_ses.update_rss_feeds();
}