package fast.rocket.dispatcher;

import java.util.concurrent.BlockingQueue;

import android.os.Process;

import fast.rocket.cache.Cache;
import fast.rocket.cache.CachePolicy;
import fast.rocket.request.Request;
import fast.rocket.response.NetworkResponse;
import fast.rocket.response.Response;
import fast.rocket.response.ResponseDelivery;
import fast.rocket.utils.Log;

/**
* Provides a thread for performing cache triage on a queue of requests.
*
* Requests added to the specified cache queue are resolved from cache. Any
* deliverable response is posted back to the caller via a
* {@link ResponseDelivery}. Cache misses and responses that require refresh are
* enqueued on the specified network queue for processing by a
* {@link NetworkDispatcher}.
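*
* <p>A minimal wiring sketch (the helper method below is illustrative, not part
* of the library; the cache and delivery instances are whatever implementations
* the surrounding request queue already uses):
*
* <pre>{@code
* CacheDispatcher startCacheDispatcher(BlockingQueue<Request> cacheQueue,
*         BlockingQueue<Request> networkQueue, Cache cache, ResponseDelivery delivery) {
*     CacheDispatcher dispatcher =
*             new CacheDispatcher(cacheQueue, networkQueue, cache, delivery);
*     dispatcher.start(); // begin triaging requests placed on cacheQueue
*     return dispatcher;  // call quit() on the returned instance when shutting down
* }
* }</pre>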
*/
@SuppressWarnings("rawtypes")
public class CacheDispatcher extends Thread {
private static final boolean DEBUG = Log.DEBUG;
/** The queue of requests coming in for triage. */
private final BlockingQueue<Request> mCacheQueue;
/** The queue of requests going out to the network. */
private final BlockingQueue<Request> mNetworkQueue;
/** The cache to read from. */
private final Cache mCache;
/** For posting responses. */
private final ResponseDelivery mDelivery;
/** Used for telling us to die. */
private volatile boolean mQuit = false;
/**
* Creates a new cache triage dispatcher thread. You must call
* {@link #start()} in order to begin processing.
*
* @param cacheQueue
* Queue of incoming requests for triage
* @param networkQueue
* Queue onto which requests that require the network are posted
* @param cache
* Cache interface to use for resolution
* @param delivery
* Delivery interface to use for posting responses
*/
public CacheDispatcher(BlockingQueue<Request> cacheQueue,
BlockingQueue<Request> networkQueue, Cache cache,
ResponseDelivery delivery) {
mCacheQueue = cacheQueue;
mNetworkQueue = networkQueue;
mCache = cache;
mDelivery = delivery;
}
/**
* Forces this dispatcher to quit immediately. If any requests are still in
* the queue, they are not guaranteed to be processed.
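* The dispatcher thread is interrupted, so a call blocked in
* {@link BlockingQueue#take()} returns promptly.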
*/
public void quit() {
mQuit = true;
interrupt();
}
@Override
public void run() {
if (DEBUG) Log.v("start new dispatcher");
Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
// Make a blocking call to initialize the cache.
mCache.initialize();
while (true) {
try {
// Get a request from the cache triage queue, blocking until
// at least one is available.
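// The reference is final so that the refresh Runnables further down can capture it.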
final Request request = mCacheQueue.take();
request.addMarker("cache-queue-take");
// If the request has been canceled, don't bother dispatching it.
if (request.isCanceled()) {
request.finish("cache-discard-canceled");
if (DEBUG) Log.v("cache-discard-canceled");
continue;
}
final CachePolicy cachePolicy = request.getCachePolicy();
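// Triage order for this request (mirrors the checks below):
//   NETWORKFIRST -> skip the cache entirely and go straight to the network
//   cache miss   -> network (applies to every remaining policy, including CACHEONLY)
//   CACHEONLY    -> deliver the cached entry, expired or not
//   NOCACHE      -> network
//   CACHEFIRST   -> deliver the cached entry, then refresh from the network
//   otherwise    -> honor the entry's expiry and soft-expiry (refresh) state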
if (cachePolicy == CachePolicy.NETWORKFIRST) {
request.addMarker("network-first");
// Do not get from cache; send off to the network
// dispatcher.
mNetworkQueue.put(request);
if (DEBUG) Log.v("network-first");
continue;
}
// Attempt to retrieve this item from cache.
Cache.Entry entry = mCache.get(request.getCacheKey());
if (entry == null) {
request.addMarker("cache-miss");
// Cache miss; send off to the network dispatcher.
mNetworkQueue.put(request);
if (DEBUG) Log.v("cache-miss");
continue;
}
if (cachePolicy == CachePolicy.CACHEONLY) {
request.addMarker("cache-hit");
Response<?> response = request.parseNetworkResponse(new NetworkResponse(
entry.data, entry.responseHeaders));
request.addMarker("cache-hit-parsed");
// Deliver the cached response whether it has expired or not.
mDelivery.postResponse(request, response);
continue;
}
if (cachePolicy == CachePolicy.NOCACHE) {
request.addMarker("no-cache");
// Do not get from cache; send off to the network
// dispatcher.
mNetworkQueue.put(request);
if (DEBUG) Log.v("no-cache");
continue;
}
if (cachePolicy == CachePolicy.CACHEFIRST) {
Response<?> response = request.parseNetworkResponse(new NetworkResponse(
entry.data, entry.responseHeaders));
request.addMarker("cache-hit-refresh-needed");
request.setCacheEntry(entry);
// Mark the response as intermediate.
response.intermediate = true;
// Post the intermediate response back to the user and have
// the delivery then forward the request along to the
// network.
mDelivery.postResponse(request, response, new Runnable() {
@Override
public void run() {
try {
mNetworkQueue.put(request);
} catch (InterruptedException e) {
// Not much we can do about this.
}
}
});
continue;
}
// If it is completely expired, just send it to the network.
if (entry.isExpired()) {
request.addMarker("cache-hit-expired");
request.setCacheEntry(entry);
mNetworkQueue.put(request);
if (DEBUG) Log.v("cache-hit-expired");
continue;
}
// We have a cache hit; parse its data for delivery back to the
// request.
request.addMarker("cache-hit");
Response<?> response = request.parseNetworkResponse(new NetworkResponse(entry.data,
entry.responseHeaders));
request.addMarker("cache-hit-parsed");
if (!entry.refreshNeeded()) {
// Completely unexpired cache hit. Just deliver the
// response.
mDelivery.postResponse(request, response);
} else {
// Soft-expired cache hit. We can deliver the cached response,
// but we need to also send the request to the network for refreshing.
request.addMarker("cache-hit-refresh-needed");
request.setCacheEntry(entry);
// Mark the response as intermediate.
response.intermediate = true;
// Post the intermediate response back to the user and have
// the delivery then forward the request along to the
// network.
mDelivery.postResponse(request, response, new Runnable() {
@Override
public void run() {
try {
mNetworkQueue.put(request);
} catch (InterruptedException e) {
// Not much we can do about this.
}
}
});
}
} catch (InterruptedException e) {
// We may have been interrupted because it was time to quit.
if (mQuit) {
return;
}
continue;
}
}
}
}