Skip to content

Commit

Permalink
Use db for caching models
Browse files Browse the repository at this point in the history
  • Loading branch information
xwz committed Sep 18, 2015
1 parent 175f5b1 commit d9df457
Show file tree
Hide file tree
Showing 13 changed files with 301 additions and 99 deletions.
13 changes: 8 additions & 5 deletions base/src/main/java/io/github/xwz/base/Utils.java
Original file line number Diff line number Diff line change
Expand Up @@ -73,11 +73,14 @@ public static String formatMillis(int millis) {
}

public static String stripCategory(String str) {
String[] parts = str.split("/");
if (parts.length > 1) {
return parts[1];
} else {
return str;
if (str != null) {
String[] parts = str.split("/");
if (parts.length > 1) {
return parts[1];
} else {
return str;
}
}
return "";
}
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
package io.github.xwz.base.content;
package io.github.xwz.base.api;

import com.raizlabs.android.dbflow.annotation.Database;

Expand Down
178 changes: 178 additions & 0 deletions base/src/main/java/io/github/xwz/base/api/ContentDatabaseCache.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,178 @@
package io.github.xwz.base.api;

import android.util.Log;

import com.raizlabs.android.dbflow.list.FlowCursorList;
import com.raizlabs.android.dbflow.list.FlowQueryList;
import com.raizlabs.android.dbflow.sql.builder.Condition;
import com.raizlabs.android.dbflow.sql.language.Select;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import io.github.xwz.base.content.ContentCacheManager;
import io.github.xwz.base.content.ContentManagerBase;

/**
 * Persists episode/show/collection models to the local database (via DBFlow)
 * so the in-memory content cache can be rebuilt on startup without refetching
 * everything over the network.
 *
 * Rows are partitioned by the DATA_TYPE column; collection rows additionally
 * carry their collection key and an ordering index so they can be restored in
 * insertion order.
 */
public class ContentDatabaseCache {
    private static final String TAG = "ContentDatabaseCache";

    // Tag values written to EpisodeBaseModel.DATA_TYPE to partition rows.
    // NOTE(review): putShows() tags rows TYPE_EPISODES and putEpisodes() tags
    // rows TYPE_SHOWS; loadFromDbCache() reads them back the same (swapped)
    // way, so the round trip is self-consistent, but the naming is inverted —
    // confirm intent before renaming the constants or their values.
    private static final String TYPE_EPISODES = "EPISODES";
    private static final String TYPE_COLLECTIONS = "COLLECTIONS";
    private static final String TYPE_SHOWS = "SHOWS";

    /** Removes every cached EpisodeBaseModel row from the database. */
    public void clearCache() {
        Log.d(TAG, "Clear db");
        FlowQueryList<EpisodeBaseModel> query = new FlowQueryList<>(EpisodeBaseModel.class);
        query.clear();
    }

    /**
     * Stores the given shows in a single transaction. Each input is copied
     * into a fresh model before saving so the caller's instances are not
     * mutated by persistence bookkeeping.
     */
    public void putShows(Collection<EpisodeBaseModel> shows) {
        Log.d(TAG, "store shows into db");
        FlowQueryList<EpisodeBaseModel> query = new FlowQueryList<>(EpisodeBaseModel.class);
        query.beginTransaction();
        for (EpisodeBaseModel ep : shows) {
            EpisodeBaseModel model = new EpisodeBaseModel();
            model.merge(ep);
            model.DATA_TYPE = TYPE_EPISODES;
            model.save();
        }
        query.endTransactionAndNotify();
    }

    /**
     * Stores every collection in a single transaction. DATA_COLLECTION_INDEX
     * is a single counter across all collections, which still yields correct
     * per-collection ordering when read back sorted by that column.
     */
    public void putCollections(LinkedHashMap<String, List<EpisodeBaseModel>> collections) {
        Log.d(TAG, "store collections into db");
        int i = 0;
        FlowQueryList<EpisodeBaseModel> query = new FlowQueryList<>(EpisodeBaseModel.class);
        query.beginTransaction();
        for (Map.Entry<String, List<EpisodeBaseModel>> collection : collections.entrySet()) {
            Log.d(TAG, "Adding collection: " + collection.getKey() + " => " + collection.getValue().size());
            updateProgress("Loading " + collection.getKey() + "...");
            for (EpisodeBaseModel ep : collection.getValue()) {
                EpisodeBaseModel model = new EpisodeBaseModel();
                model.merge(ep);
                model.DATA_TYPE = TYPE_COLLECTIONS;
                model.DATA_COLLECTION_KEY = collection.getKey();
                model.DATA_COLLECTION_INDEX = i++;
                model.save();
            }
        }
        query.endTransactionAndNotify();
    }

    /** Broadcasts a human-readable progress message for the UI to display. */
    private void updateProgress(String str) {
        ContentManagerBase.getInstance().broadcastChange(ContentManagerBase.CONTENT_SHOW_LIST_PROGRESS, str);
    }

    /** Stores the given episodes in a single transaction (copies, as above). */
    public void putEpisodes(Collection<EpisodeBaseModel> episodes) {
        Log.d(TAG, "store episodes into db");
        FlowQueryList<EpisodeBaseModel> query = new FlowQueryList<>(EpisodeBaseModel.class);
        query.beginTransaction();
        for (EpisodeBaseModel ep : episodes) {
            EpisodeBaseModel model = new EpisodeBaseModel();
            model.merge(ep);
            model.DATA_TYPE = TYPE_SHOWS;
            model.save();
        }
        query.endTransactionAndNotify();
    }

    /**
     * Loads all rows tagged with {@code type}, preferring an already-loaded
     * instance from {@code existing} over a fresh copy so object identity is
     * shared across the cache. Results are de-duplicated by series title
     * (when {@code uniqueSeries}) or by href.
     *
     * Fix: previously {@code existing.indexOf(...)} was evaluated against a
     * freshly constructed, empty model, so the reuse branch could never match;
     * we now look up the loaded db item, mirroring getCollections().
     *
     * @param model concrete EpisodeBaseModel subclass to instantiate
     * @return de-duplicated list of models of the requested type
     */
    private List<EpisodeBaseModel> getModelsOfType(Class<?> model, String type, List<EpisodeBaseModel> existing, boolean uniqueSeries) {
        FlowCursorList<EpisodeBaseModel> cursor = new FlowCursorList<>(false, EpisodeBaseModel.class,
                Condition.column(EpisodeBaseModel$Table.DATA_TYPE).eq(type));
        Map<String, EpisodeBaseModel> all = new HashMap<>();
        try {
            for (int i = 0, k = cursor.getCount(); i < k; i++) {
                EpisodeBaseModel item = cursor.getItem(i);
                item.unserialize();
                int index = existing.indexOf(item);
                EpisodeBaseModel ep;
                if (index > -1) {
                    // Reuse the instance the cache already holds.
                    ep = existing.get(index);
                } else {
                    ep = (EpisodeBaseModel) createInstanceOf(model);
                    if (ep != null) {
                        ep.merge(item);
                    }
                }
                if (ep != null) {
                    if (uniqueSeries) {
                        all.put(ep.getSeriesTitle(), ep);
                    } else {
                        all.put(ep.getHref(), ep);
                    }
                }
            }
        } finally {
            cursor.close(); // always release the underlying cursor
        }
        return new ArrayList<>(all.values());
    }

    /**
     * Instantiates the given class via its public no-arg constructor.
     *
     * @return the new instance, or null (after logging) when reflection fails
     */
    private Object createInstanceOf(Class<?> type) {
        try {
            Constructor<?> ctor = type.getConstructor();
            return ctor.newInstance();
        } catch (NoSuchMethodException | InvocationTargetException
                | InstantiationException | IllegalAccessException e) {
            e.printStackTrace();
            return null;
        }
    }

    /**
     * Restores all collections in their stored order, reusing instances from
     * {@code existing} where possible (same identity-sharing as
     * getModelsOfType).
     */
    private LinkedHashMap<String, List<EpisodeBaseModel>> getCollections(Class<?> model, List<EpisodeBaseModel> existing) {
        LinkedHashMap<String, List<EpisodeBaseModel>> collections = new LinkedHashMap<>();
        FlowCursorList<EpisodeBaseModel> cursor = new FlowCursorList<>(false,
                (new Select()).from(EpisodeBaseModel.class)
                        .where(Condition.column(EpisodeBaseModel$Table.DATA_TYPE).eq(TYPE_COLLECTIONS))
                        .orderBy(true, EpisodeBaseModel$Table.DATA_COLLECTION_INDEX));
        try {
            for (int i = 0, k = cursor.getCount(); i < k; i++) {
                EpisodeBaseModel item = cursor.getItem(i);
                item.unserialize();
                int index = existing.indexOf(item);
                EpisodeBaseModel ep;
                if (index > -1) {
                    ep = existing.get(index);
                } else {
                    ep = (EpisodeBaseModel) createInstanceOf(model);
                    if (ep != null) {
                        ep.merge(item);
                    }
                }
                if (ep != null) {
                    List<EpisodeBaseModel> members = collections.get(item.DATA_COLLECTION_KEY);
                    if (members == null) {
                        members = new ArrayList<EpisodeBaseModel>();
                        collections.put(item.DATA_COLLECTION_KEY, members);
                    }
                    members.add(ep);
                }
            }
        } finally {
            // Fix: this cursor was previously leaked; getModelsOfType already
            // closed its cursor — keep both consistent.
            cursor.close();
        }
        for (Map.Entry<String, List<EpisodeBaseModel>> collection : collections.entrySet()) {
            Log.d(TAG, "Loaded collection: " + collection.getKey() + " => " + collection.getValue().size());
        }
        return collections;
    }

    /**
     * Rebuilds the in-memory cache from the database.
     *
     * NOTE(review): the progress strings ("Loading images..." before episodes,
     * "Loading movies..." before collections) look mismatched with what each
     * step loads — they are user-visible runtime strings, so left untouched;
     * confirm wording with the UI owner.
     *
     * @param cache in-memory cache to populate
     * @param type  concrete EpisodeBaseModel subclass to instantiate
     * @return true when the db held content and the cache was populated
     */
    public boolean loadFromDbCache(ContentCacheManager cache, Class<?> type) {
        updateProgress("Loading images...");
        List<EpisodeBaseModel> episodes = getModelsOfType(type, TYPE_EPISODES, new ArrayList<EpisodeBaseModel>(), false);
        if (episodes.size() > 0) {
            updateProgress("Loading TV shows...");
            List<EpisodeBaseModel> shows = getModelsOfType(type, TYPE_SHOWS, episodes, true);
            updateProgress("Loading movies...");
            LinkedHashMap<String, List<EpisodeBaseModel>> collections = getCollections(type, episodes);
            cache.putEpisodes(episodes);
            cache.putShows(shows);
            cache.putCollections(collections);
            updateProgress("Loading content...");
            cache.buildDictionary(shows);
            Log.d(TAG, "Loaded data from database");
            return true;
        }
        return false;
    }
}
40 changes: 36 additions & 4 deletions base/src/main/java/io/github/xwz/base/api/EpisodeBaseModel.java
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,9 @@
import com.raizlabs.android.dbflow.annotation.Table;
import com.raizlabs.android.dbflow.structure.BaseModel;

import org.json.JSONArray;
import org.json.JSONException;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
Expand All @@ -17,17 +20,17 @@
import java.util.Set;

import io.github.xwz.base.Utils;
import io.github.xwz.base.content.ContentDatabase;

@Table(databaseName = ContentDatabase.NAME)
public class EpisodeBaseModel extends BaseModel implements Serializable {

private static final String TAG = "EpisodeBaseModel";

@Column
@PrimaryKey
private String href;
@PrimaryKey(autoincrement = true)
public long DATA_ID;

@Column
@PrimaryKey
public String DATA_TYPE;

@Column
Expand All @@ -36,6 +39,9 @@ public class EpisodeBaseModel extends BaseModel implements Serializable {
@Column
public int DATA_COLLECTION_INDEX;

@Column
private String href;

@Column
private String seriesTitle;

Expand Down Expand Up @@ -101,6 +107,32 @@ public void update() {
super.update();
}

/**
 * Restores the category list from its serialized JSON-array representation.
 * Does nothing when no serialized payload exists or when the payload cannot
 * be parsed as a JSON array; individual unreadable entries are logged and
 * skipped.
 */
void unserialize() {
    if (categoriesSerialized == null) {
        return;
    }
    JSONArray parsed = parseArray(categoriesSerialized);
    if (parsed == null) {
        return;
    }
    for (int idx = 0; idx < parsed.length(); idx++) {
        try {
            addCategory(parsed.getString(idx));
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }
}

/**
 * Attempts to parse the given string as a JSON array.
 *
 * A cheap bracket check filters out strings that obviously cannot be arrays
 * before paying for a real parse.
 *
 * @param content candidate JSON text; may be null
 * @return the parsed array, or null when content is absent or unparseable
 */
private JSONArray parseArray(String content) {
    boolean looksLikeArray =
            content != null && content.contains("[") && content.contains("]");
    if (!looksLikeArray) {
        return null;
    }
    try {
        return new JSONArray(content);
    } catch (JSONException e) {
        e.printStackTrace();
        return null;
    }
}

/** Records a category label in this episode's in-memory category collection. */
public void addCategory(String cat) {
categories.add(cat);
}
Expand Down
32 changes: 0 additions & 32 deletions base/src/main/java/io/github/xwz/base/api/HttpApiBase.java
Original file line number Diff line number Diff line change
Expand Up @@ -170,38 +170,6 @@ private static File createDefaultCacheDir(Context context, String path) {
return cache;
}

/**
 * Builds a prefix-search dictionary from words in the given shows' titles.
 * NOTE(review): an identical implementation exists in ContentCacheManager in
 * this changeset — consider keeping a single copy.
 */
protected RadixTree<String> buildWordsFromShows(Collection<EpisodeBaseModel> shows) {
RadixTree<String> dict = new RadixTree<>();
for (EpisodeBaseModel ep : shows) {
dict.putAll(getWords(ep));
}
Log.d(TAG, "dict:" + dict.size());
return dict;
}

/**
 * Collects searchable words from an episode's series title and episode title.
 * Either title may be null and is then skipped.
 * NOTE(review): duplicated in ContentCacheManager in this changeset.
 */
private Map<String, String> getWords(EpisodeBaseModel episode) {
Map<String, String> words = new HashMap<>();
if (episode.getSeriesTitle() != null) {
words.putAll(splitWords(episode.getSeriesTitle(), episode));
}
if (episode.getTitle() != null) {
words.putAll(splitWords(episode.getTitle(), episode));
}
return words;
}

/**
 * Splits free text into words, strips non-word characters, and keeps words of
 * length >= 3, keyed by their lower-cased form (value keeps original casing).
 * NOTE(review): the episode parameter is never used in this body — confirm
 * whether it can be dropped at the call sites.
 * NOTE(review): toLowerCase() uses the default locale — verify that is
 * acceptable for search matching (Turkish-i pitfall).
 */
private Map<String, String> splitWords(String s, EpisodeBaseModel episode) {
String[] words = s.split("\\s+");
Map<String, String> result = new HashMap<>();
for (String w : words) {
String word = w.replaceAll("[^\\w]", "");
if (word.length() >= 3) {
result.put(word.toLowerCase(), word);
}
}
return result;
}

private static long calculateDiskCacheSize(File dir) {
long size = Math.min(calculateAvailableCacheSize(dir), MAX_DISK_CACHE_SIZE);
return Math.max(size, MIN_DISK_CACHE_SIZE);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ public class ContentCacheManager {
private static final String TAG = "ContentCacheManager";
private final LocalBroadcastManager mBroadcastManager;

private final Map<String, EpisodeBaseModel> mEpisodes = new HashMap<>();
private Map<String, EpisodeBaseModel> mEpisodes = new HashMap<>();
private List<EpisodeBaseModel> mShows = new ArrayList<>();
private RadixTree<String> mDictionary = new RadixTree<>();
private final Map<String, Uri> mStreamUrls = new HashMap<>();
Expand Down Expand Up @@ -87,14 +87,19 @@ synchronized public LinkedHashMap<String, List<EpisodeBaseModel>> getCollections
return new LinkedHashMap<>(mCollections);
}

/**
 * Replaces the entire episode cache with the given episodes, keyed by href.
 * A fresh map is installed (rather than clearing the old one) before being
 * populated, exactly as before.
 */
synchronized public void putEpisodes(Collection<EpisodeBaseModel> episodes) {
    mEpisodes = new HashMap<>();
    for (EpisodeBaseModel episode : episodes) {
        mEpisodes.put(episode.getHref(), episode);
    }
}

/**
 * Adds (or overwrites) the given episodes in the cache, keyed by href.
 * Unlike putEpisodes, previously cached entries with other hrefs survive.
 */
synchronized public void addEpisodes(Collection<EpisodeBaseModel> episodes) {
    for (EpisodeBaseModel episode : episodes) {
        mEpisodes.put(episode.getHref(), episode);
    }
}

synchronized public void setDictionary(RadixTree<String> dict) {
mDictionary = dict;
/**
 * Rebuilds the search-suggestion dictionary from the given shows' titles,
 * replacing any previous dictionary.
 */
synchronized public void buildDictionary(Collection<EpisodeBaseModel> shows) {
mDictionary = buildWordsFromShows(shows);
}

synchronized public List<String> getSuggestions(String query) {
Expand Down Expand Up @@ -124,4 +129,36 @@ synchronized public void putStreamUrl(String id, Uri url) {
/**
 * Returns the previously cached stream Uri for the given episode id, or null
 * when no url has been cached for it.
 */
synchronized public Uri getEpisodeStreamUrl(String id) {
return mStreamUrls.get(id);
}

/**
 * Builds the autocomplete dictionary by merging the searchable words of every
 * show into one radix tree.
 */
private RadixTree<String> buildWordsFromShows(Collection<EpisodeBaseModel> shows) {
    RadixTree<String> dictionary = new RadixTree<>();
    for (EpisodeBaseModel show : shows) {
        dictionary.putAll(getWords(show));
    }
    Log.d(TAG, "dict:" + dictionary.size());
    return dictionary;
}

/**
 * Collects searchable words from an episode's series title and episode title;
 * either may be null and is then skipped.
 */
private Map<String, String> getWords(EpisodeBaseModel episode) {
    Map<String, String> collected = new HashMap<>();
    if (episode.getSeriesTitle() != null) {
        collected.putAll(splitWords(episode.getSeriesTitle(), episode));
    }
    if (episode.getTitle() != null) {
        collected.putAll(splitWords(episode.getTitle(), episode));
    }
    return collected;
}

/**
 * Splits free text into candidate search words: whitespace-separated tokens
 * are stripped of non-word characters and kept when at least 3 characters
 * long, keyed by their lower-cased form (value preserves original casing).
 *
 * @param episode unused here; retained so the signature matches its callers —
 *                NOTE(review): confirm it can be dropped
 */
private Map<String, String> splitWords(String s, EpisodeBaseModel episode) {
    Map<String, String> index = new HashMap<>();
    for (String token : s.split("\\s+")) {
        String cleaned = token.replaceAll("[^\\w]", "");
        if (cleaned.length() >= 3) {
            index.put(cleaned.toLowerCase(), cleaned);
        }
    }
    return index;
}
}
Loading

0 comments on commit d9df457

Please sign in to comment.