Coverage Report - net.sourceforge.pebble.aggregator.NewsFeedCache

Classes in this File    Line Coverage    Branch Coverage    Complexity
NewsFeedCache           0% (0/92)        0% (0/50)          4.111
 
/*
 * Copyright (c) 2003-2011, Simon Brown
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *   - Redistributions of source code must retain the above copyright
 *     notice, this list of conditions and the following disclaimer.
 *
 *   - Redistributions in binary form must reproduce the above copyright
 *     notice, this list of conditions and the following disclaimer in
 *     the documentation and/or other materials provided with the
 *     distribution.
 *
 *   - Neither the name of Pebble nor the names of its contributors may
 *     be used to endorse or promote products derived from this software
 *     without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

package net.sourceforge.pebble.aggregator;

import com.sun.syndication.feed.WireFeed;
import com.sun.syndication.feed.atom.Content;
import com.sun.syndication.feed.atom.Entry;
import com.sun.syndication.feed.atom.Link;
import com.sun.syndication.feed.rss.Channel;
import com.sun.syndication.feed.rss.Item;
import com.sun.syndication.io.FeedException;
import com.sun.syndication.io.WireFeedInput;
import com.sun.syndication.io.XmlReader;
import net.sourceforge.pebble.domain.Blog;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.io.IOException;
import java.net.URL;
import java.util.*;

/**
 * A cache of newsfeed subscriptions and their entries.
 *
 * @author    Simon Brown
 */
public class NewsFeedCache {

  /** the maximum number of aggregated entries held per blog */
  private static final int FEED_ENTRY_LIMIT = 20;

  private static final Log log = LogFactory.getLog(NewsFeedCache.class);
  private static final NewsFeedCache instance = new NewsFeedCache();

  /** maps a blog id to the set of feed URLs that blog subscribes to */
  private final Map<String,Set<String>> subscriptions = new HashMap<String,Set<String>>();

  /** maps a feed URL to the most recently retrieved copy of that feed */
  private final Map<String, NewsFeed> feeds = new HashMap<String, NewsFeed>();

  /** maps a blog id to its aggregated, sorted list of entries */
  private final Map<String,List<NewsFeedEntry>> entries = new HashMap<String,List<NewsFeedEntry>>();

  private NewsFeedCache() {
  }

  /**
   * Gets the singleton instance of this cache.
   */
  public static NewsFeedCache getInstance() {
    return instance;
  }

  /**
   * Registers a subscription to the given feed URL for the given blog and
   * fetches the feed if it isn't already cached.
   */
  public void addSubscription(Blog blog, String url) {
    synchronized (feeds) {
      Set<String> urls = getUrls(blog.getId());
      urls.add(url);

      NewsFeed feed = feeds.get(url);
      if (feed == null) {
        feed = updateFeed(url);
        feeds.put(url, feed);
      }
    }
  }

  /**
   * Removes all feed subscriptions for the given blog.
   */
  public void removeAllSubscriptions(Blog blog) {
    synchronized (feeds) {
      Set<String> urls = getUrls(blog.getId());
      urls.clear();
    }
  }

  /**
   * Re-fetches every cached feed and rebuilds the aggregated entry list for
   * each subscribed blog, keeping at most FEED_ENTRY_LIMIT entries per blog.
   */
  public void refreshFeeds() {
    for (String url : feeds.keySet()) {
      try {
        NewsFeed updatedFeed = updateFeed(url);
        synchronized (feeds) {
          feeds.put(url, updatedFeed);
        }
      } catch (Exception e) {
        log.warn("Couldn't update feed from " + url, e);
      }
    }

    for (String blogId : subscriptions.keySet()) {
      List<NewsFeedEntry> entriesForBlog = new LinkedList<NewsFeedEntry>();
      for (String url : getUrls(blogId)) {
        entriesForBlog.addAll(feeds.get(url).getEntries());
      }

      Collections.sort(entriesForBlog, new NewsFeedEntryComparator());

      if (entriesForBlog.size() > FEED_ENTRY_LIMIT) {
        entriesForBlog = entriesForBlog.subList(0, FEED_ENTRY_LIMIT);
      }

      entries.put(blogId, entriesForBlog);
    }
  }

  /**
   * Fetches and parses the feed at the given URL, populating a NewsFeed from
   * either an RSS channel or an Atom feed. Parse and I/O errors are logged
   * and an empty feed is returned.
   */
  private NewsFeed updateFeed(String url) {
    NewsFeed feed = new NewsFeed(url);

    try {
      log.debug("Refreshing feed from " + url);

//      SyndFeedInput input = new SyndFeedInput(true);
//      SyndFeed sf = input.build(new XmlReader(new URL(url)));
//
//      feed.setTitle(sf.getTitle());
//      feed.setLink(sf.getLink());
//
//      for (SyndEntry se : (List<SyndEntry>)sf.getEntries()) {
//        log.info(se);
//        NewsFeedEntry fe = new NewsFeedEntry(
//            se.getLink(),
//            se.getTitle(),
//            se.getDescription() != null ? se.getDescription().getValue() : "",
//            se.getAuthor(),
//            se.getPublishedDate()
//        );
//        feed.add(fe);
//        log.info(fe);
//      }

      WireFeedInput input = new WireFeedInput(true);
      WireFeed wf = input.build(new XmlReader(new URL(url)));

      if (wf.getFeedType() != null && wf.getFeedType().startsWith("rss")) {
        Channel rssFeed = (Channel)wf;
        feed.setTitle(rssFeed.getTitle());
        feed.setLink(rssFeed.getLink());

        for (Item item : (List<Item>)rssFeed.getItems()) {
          NewsFeedEntry fe = new NewsFeedEntry(
              item.getLink(),
              item.getTitle(),
              item.getDescription() != null ? item.getDescription().getValue() : "",
              item.getAuthor(),
              item.getPubDate()
          );
          feed.add(fe);
        }
      } else if (wf.getFeedType() != null && wf.getFeedType().startsWith("atom")) {
        com.sun.syndication.feed.atom.Feed atomFeed = (com.sun.syndication.feed.atom.Feed)wf;
        feed.setTitle(atomFeed.getTitle());
        for (Link link : (List<Link>)atomFeed.getAlternateLinks()) {
          if ("text/html".equals(link.getType())) {
            feed.setLink(link.getHref());
          }
        }

        for (Entry entry : (List<Entry>)atomFeed.getEntries()) {
          String href = "";
          for (Link link : (List<Link>)entry.getAlternateLinks()) {
            if ("text/html".equals(link.getType())) {
              href = link.getHref();
            }
          }

          String body = null;
          for (Content content : (List<Content>)entry.getContents()) {
            if ("html".equals(content.getType())) {
              body = content.getValue();
            }
          }
          if (body == null) {
            // fall back to the entry summary when there is no HTML content element
            Content summary = entry.getSummary();
            if (summary != null && "html".equals(summary.getType())) {
              body = summary.getValue();
            }
          }

          String author = entry.getAuthors() != null && entry.getAuthors().size() > 0 ? entry.getAuthors().get(0).toString() : "";
          NewsFeedEntry fe = new NewsFeedEntry(
              href,
              entry.getTitle(),
              body,
              author,
              entry.getPublished()
          );
          feed.add(fe);
        }
      }

      log.debug("Refreshed feed from " + url);
    } catch (FeedException e) {
      log.warn("Error while updating feed from " + url, e);
    } catch (IOException e) {
      log.warn("Error while updating feed from " + url, e);
    }

    return feed;
  }

  /**
   * Gets the cached feed for the given URL, or null if it hasn't been fetched.
   */
  public NewsFeed getFeed(String url) {
    return feeds.get(url);
  }

  /**
   * Gets the aggregated entries for the given blog, or an empty list if the
   * blog has no aggregated entries yet.
   */
  public List<NewsFeedEntry> getNewsFeedEntries(Blog blog) {
    List<NewsFeedEntry> list = entries.get(blog.getId());
    if (list == null) {
      list = new LinkedList<NewsFeedEntry>();
    }

    return list;
  }

  /**
   * Gets, creating it if necessary, the set of subscribed feed URLs for the
   * given blog id.
   */
  private Set<String> getUrls(String blogId) {
    Set<String> urls = subscriptions.get(blogId);
    if (urls == null) {
      urls = new HashSet<String>();
      subscriptions.put(blogId, urls);
    }

    return urls;
  }

}
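The class currently reports 0% line and branch coverage. A small unit test against its public API is one way to start exercising it; the hypothetical NewsFeedCacheTest below is a minimal sketch, not part of the Pebble codebase. It assumes JUnit 4 and Mockito are available on the test classpath, that Blog and its getId() method are mockable, and it uses a placeholder feed URL (which may be contacted over the network during the test), so only the no-network code paths are covered.

package net.sourceforge.pebble.aggregator;

import static org.junit.Assert.assertNotNull;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.List;

import net.sourceforge.pebble.domain.Blog;
import org.junit.Test;

public class NewsFeedCacheTest {

  @Test
  public void subscriptionCachesFeedAndReturnsEntryList() {
    // placeholder URL: connection and parse failures are logged by
    // updateFeed(), which then returns an empty NewsFeed rather than throwing
    String url = "http://example.com/feed.xml";

    // only getId() is called on Blog by NewsFeedCache
    Blog blog = mock(Blog.class);
    when(blog.getId()).thenReturn("someblog");

    NewsFeedCache cache = NewsFeedCache.getInstance();
    cache.addSubscription(blog, url);

    // the feed is now cached under its URL, even if it holds no entries
    assertNotNull(cache.getFeed(url));

    // refreshing rebuilds the per-blog entry list, which is never null
    cache.refreshFeeds();
    List<NewsFeedEntry> entries = cache.getNewsFeedEntries(blog);
    assertNotNull(entries);

    cache.removeAllSubscriptions(blog);
  }
}

Because NewsFeedCache is a singleton with static state, feeds cached by one test remain visible to later tests, so assertions here avoid assuming a clean cache.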