Originally, we planned on using a Unity Asset to query Twitter directly, but quickly decided against it. The Twitter API specification says that no more than 180 requests can be made in any one hour; that's just three a minute. While that's not actually all that bad, if we ended up with, say, 10 people all running the same app - since each request is linked to a specific user account for authentication - we could quickly use up all our requests before the hour was up.
We eventually decided on a web-server-based query: our PHP script would query Twitter no more than once per minute, and the Unity app would hit our server with a request for the latest tweets. This means there's no danger of upsetting the Twitter gods and getting barred for exceeding our rate limit (we managed this a few years back, when first playing with the OAuth API on Twitter - getting banned makes further app development quite tricky!).
By using our own server to cache tweets, we're also able to add in things like filtering - removing or replacing inappropriate content and so on. It also means that, should we want to reuse this app-to-Twitter-style interface at any time in the future, we can run it completely standalone, or maybe plug it into a different social media platform (the exact same app could read back posts from a Facebook timeline, for example, since it never actually connects to the endpoint where the messages are created).
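For example, each tweet could be run through a simple word filter before it's cached or echoed back to the Unity app - something like this (the filter_tweets() function and the word list are just placeholders for illustration, not part of the actual script further down):

// replace any blacklisted words with asterisks before the tweet
// text is cached or sent back to the Unity app
function filter_tweets($text){
    $banned = array("badword1", "badword2");   // example word list
    foreach($banned as $word){
        $text = str_ireplace($word, str_repeat("*", strlen($word)), $text);
    }
    return $text;
}
// e.g. in the main script: $twits .= filter_tweets($twit);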
Our PHP for scraping tweets looked something like this:
<?php
ini_set('display_errors', 1);

// get the last time this script hit Twitter.
// make sure we only do it once per minute maximum, otherwise we
// might get barred from Twitter altogether!
$run_twitter = 0;
if(is_file("/var/tmp/last_ran.r")){
    // get the last time the Twitter website was polled
    $f = "/var/tmp/last_ran.r";
    $file = fopen($f, "r");
    $ldt = fgets($file);
    fclose($file);

    $dt = date('YmdHi');
    if($dt == $ldt){
        // we've already queried Twitter this minute, so return the cached tweets
        $f = "/var/tmp/last_tweets.t";
        $file = fopen($f, "r");
        $s = fread($file, filesize($f));
        fclose($file);
        echo($s);
    }else{
        $run_twitter = 1;
    }
}else{
    $run_twitter = 1;
}

if($run_twitter == 1){
    require_once('TwitterAPIExchange.php');

    $oauthAccessToken = 'access-token-here';
    $oauthAccessTokenSecret = 'access-secret-here';
    $oauthConsumerKey = 'consumer-key-here';
    $oauthConsumerSecret = 'consumer-secret-here';

    $hashtag = "nerdclub";
    if(isset($_GET['hashtag'])){ $hashtag = trim($_GET['hashtag']); }

    $settings = array(
        'oauth_access_token' => $oauthAccessToken,
        'oauth_access_token_secret' => $oauthAccessTokenSecret,
        'consumer_key' => $oauthConsumerKey,
        'consumer_secret' => $oauthConsumerSecret
    );

    $url = 'https://api.twitter.com/1.1/search/tweets.json';
    $requestMethod = 'GET';
    // %23 is the url-encoded '#', so the hashtag survives the query string
    $getfield = '?q=%23' . urlencode($hashtag) . '&result_type=recent';

    // perform the request
    $twitter = new TwitterAPIExchange($settings);
    $json = $twitter->setGetfield($getfield)
        ->buildOauth($url, $requestMethod)
        ->performRequest();

    $twits = "";
    $response = json_decode($json);
    foreach($response->statuses as $tweet){
        // one tweet per line, so the Unity app can split the response on newlines
        $twit = "[{$tweet->user->screen_name}] {$tweet->text}\n";
        echo($twit);
        $twits .= $twit;
    }

    // now write the current time to the last-ran file
    $file = fopen("/var/tmp/last_ran.r", "w");
    fwrite($file, date('YmdHi'));
    fclose($file);

    // and write the last lot of tweets out to disk
    $file2 = fopen("/var/tmp/last_tweets.t", "w");
    fwrite($file2, $twits);
    fclose($file2);
}
?>
The PHP file writes the current date, in YmdHi (year, month, day, hour, minute) format, to a temporary file on the server. Each time the script runs, it compares the current time to the time it last queried Twitter. If the two are the same, it doesn't fire off another request, instead returning the cached results; if they're different, we know we're performing at most one request per minute, and so it gets a fresh lot of data from Twitter.
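To make the throttling a bit more concrete, here's a minimal sketch of that comparison (the timestamp value is made up for illustration):

// date('YmdHi') gives a string like "201703211437" (21st March 2017, 14:37);
// two calls within the same minute produce identical strings, so a simple
// string comparison is enough to limit us to one Twitter request per minute
$last_ran = "201703211437";   // value read back from /var/tmp/last_ran.r
$now = date('YmdHi');
if($now == $last_ran){
    echo("serve the cached tweets");
}else{
    echo("safe to query Twitter again");
}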
With this in place, it was just a simple matter of building a Unity script to fetch the text from our own web server, split it into pieces and display it on the appropriate Unity GUI canvas object.
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.UI;

// the class name here is just a placeholder - attach this script to a GameObject
// in the scene and drag the tweet Text object onto txtMessage in the inspector
public class TwitterTicker : MonoBehaviour {

    public string hashtag;
    private string base_url = "http://www.nerdclub.co.uk/twitter/index.php?hashtag=";
    private string twatterString;
    private string[] tweets;
    private string[] words;
    private int tweetIndex;
    private int wordIndex;

    public Text txtMessage;               // link this to the tweet textbox in the Unity editor
    public float wordSpeed = 0.2f;
    public float delayBetweenTweets = 4f;

    void Start () {
        Invoke ("startTwatter", 1f);
    }

    void startTwatter(){
        txtMessage.text = "";
        Invoke ("restartTweets", delayBetweenTweets);
    }

    void restartTweets(){
        // ask our own web server for the latest tweets matching the hashtag
        if (hashtag.Length < 1) { hashtag = "nerdclub"; }
        string url = base_url + hashtag;
        Debug.Log (url);
        WWW www = new WWW(url);
        StartCoroutine(WaitForRequest(www));
    }

    void displayNextTweet(){
        // move on to the next tweet and split it into words on spaces
        tweetIndex++;
        if (tweetIndex >= tweets.Length) {
            // end of tweets - clear the display and start again
            Debug.Log ("no more tweets");
            txtMessage.text = "";
            tweetIndex = -1;
            Invoke ("restartTweets", delayBetweenTweets);
        } else {
            wordIndex = 0;
            twatterString = tweets [tweetIndex];
            if (twatterString.Trim ().Length == 0) {
                // skip empty lines
                Invoke ("displayNextTweet", wordSpeed);
            } else {
                words = twatterString.Split (new string[] { " " }, StringSplitOptions.None);
                Invoke ("nextWord", wordSpeed);
            }
        }
    }

    void nextWord(){
        if (wordIndex >= words.Length) {
            // end of tweet
            Debug.Log ("End of tweet");
            Invoke ("displayNextTweet", delayBetweenTweets);
        }else{
            // display this word
            Debug.Log(words[wordIndex]);
            txtMessage.text = words[wordIndex];
            Invoke ("nextWord", wordSpeed);
        }
        wordIndex++;
    }

    IEnumerator WaitForRequest(WWW www){
        yield return www;
        // check for errors
        if (www.error == null) {
            Debug.Log ("Response: " + www.text);
            twatterString = www.text;
            // the server returns one tweet per line
            tweets = twatterString.Split(new string[] { "\r\n", "\n" }, StringSplitOptions.None);
            tweetIndex = -1;
            Invoke("displayNextTweet", delayBetweenTweets);
        } else {
            Debug.Log("WWW Error: " + www.error);
        }
    }
}
The result looks something like this:
Here we've changed the tag to #harveyandjohn - those crazy inventor types from Brighton who - like us - have been neglecting their Twitter presence, so it was easy to find recent tweets with a matching tag.