Merge "Disconnect the native window in CameraService::Client::disconnect."
diff --git a/api/14.txt b/api/14.txt
index d25d2b9..2a1bcac 100644
--- a/api/14.txt
+++ b/api/14.txt
@@ -19820,8 +19820,8 @@
field public static final java.util.regex.Pattern IP_ADDRESS;
field public static final java.util.regex.Pattern PHONE;
field public static final java.util.regex.Pattern TOP_LEVEL_DOMAIN;
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw])";
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw]))";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw])";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw]))";
field public static final java.util.regex.Pattern WEB_URL;
}
diff --git a/api/current.txt b/api/current.txt
index 4997ae6..a9aaf38 100644
--- a/api/current.txt
+++ b/api/current.txt
@@ -8905,6 +8905,8 @@
method public final void takePicture(android.hardware.Camera.ShutterCallback, android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback);
method public final void takePicture(android.hardware.Camera.ShutterCallback, android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback, android.hardware.Camera.PictureCallback);
method public final void unlock();
+ field public static final java.lang.String ACTION_NEW_PICTURE = "android.hardware.action.NEW_PICTURE";
+ field public static final java.lang.String ACTION_NEW_VIDEO = "android.hardware.action.NEW_VIDEO";
field public static final int CAMERA_ERROR_SERVER_DIED = 100; // 0x64
field public static final int CAMERA_ERROR_UNKNOWN = 1; // 0x1
}
@@ -14432,6 +14434,7 @@
public class RecoverySystem {
ctor public RecoverySystem();
method public static void installPackage(android.content.Context, java.io.File) throws java.io.IOException;
+ method public static void rebootWipeCache(android.content.Context) throws java.io.IOException;
method public static void rebootWipeUserData(android.content.Context) throws java.io.IOException;
method public static void verifyPackage(java.io.File, android.os.RecoverySystem.ProgressListener, java.io.File) throws java.security.GeneralSecurityException, java.io.IOException;
}
@@ -20713,8 +20716,8 @@
field public static final java.util.regex.Pattern IP_ADDRESS;
field public static final java.util.regex.Pattern PHONE;
field public static final java.util.regex.Pattern TOP_LEVEL_DOMAIN;
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw])";
- field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)|y[et]|z[amw]))";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR = "((aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(biz|b[abdefghijmnorstvwyz])|(cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(edu|e[cegrstu])|f[ijkmor]|(gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(info|int|i[delmnoqrst])|(jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(name|net|n[acefgilopruz])|(org|om)|(pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw])";
+ field public static final java.lang.String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL = "(?:(?:aero|arpa|asia|a[cdefgilmnoqrstuwxz])|(?:biz|b[abdefghijmnorstvwyz])|(?:cat|com|coop|c[acdfghiklmnoruvxyz])|d[ejkmoz]|(?:edu|e[cegrstu])|f[ijkmor]|(?:gov|g[abdefghilmnpqrstuwy])|h[kmnrtu]|(?:info|int|i[delmnoqrst])|(?:jobs|j[emop])|k[eghimnprwyz]|l[abcikrstuvy]|(?:mil|mobi|museum|m[acdeghklmnopqrstuvwxyz])|(?:name|net|n[acefgilopruz])|(?:org|om)|(?:pro|p[aefghklmnrstwy])|qa|r[eosuw]|s[abcdeghijklmnortuvyz]|(?:tel|travel|t[cdfghjklmnoprtvwz])|u[agksyz]|v[aceginu]|w[fs]|(?:\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)|y[et]|z[amw]))";
field public static final java.util.regex.Pattern WEB_URL;
}
diff --git a/cmds/dumpstate/Android.mk b/cmds/dumpstate/Android.mk
index 56f1324..d602500 100644
--- a/cmds/dumpstate/Android.mk
+++ b/cmds/dumpstate/Android.mk
@@ -11,4 +11,9 @@
LOCAL_SHARED_LIBRARIES := libcutils
+ifdef BOARD_LIB_DUMPSTATE
+LOCAL_STATIC_LIBRARIES := $(BOARD_LIB_DUMPSTATE)
+LOCAL_CFLAGS += -DBOARD_HAS_DUMPSTATE
+endif
+
include $(BUILD_EXECUTABLE)
diff --git a/cmds/dumpstate/dumpstate.c b/cmds/dumpstate/dumpstate.c
index 42c35af..4926db2 100644
--- a/cmds/dumpstate/dumpstate.c
+++ b/cmds/dumpstate/dumpstate.c
@@ -197,6 +197,15 @@
dump_file(NULL, "/sys/class/leds/lcd-backlight/registers");
printf("\n");
+#ifdef BOARD_HAS_DUMPSTATE
+ printf("========================================================\n");
+ printf("== Board\n");
+ printf("========================================================\n");
+
+ dumpstate_board();
+ printf("\n");
+#endif
+
printf("========================================================\n");
printf("== Android Framework Services\n");
printf("========================================================\n");
@@ -218,6 +227,9 @@
run_command("APP SERVICES", 30, "dumpsys", "activity", "service", "all", NULL);
+ printf("========================================================\n");
+ printf("== dumpstate: done\n");
+ printf("========================================================\n");
}
static void usage() {
diff --git a/cmds/dumpstate/dumpstate.h b/cmds/dumpstate/dumpstate.h
index 83b1d11..597ab1f 100644
--- a/cmds/dumpstate/dumpstate.h
+++ b/cmds/dumpstate/dumpstate.h
@@ -19,6 +19,7 @@
#include <time.h>
#include <unistd.h>
+#include <stdio.h>
/* prints the contents of a file */
int dump_file(const char *title, const char* path);
@@ -47,4 +48,7 @@
/* Play a sound via Stagefright */
void play_sound(const char* path);
+/* Implemented by libdumpstate_board to dump board-specific info */
+void dumpstate_board();
+
#endif /* _DUMPSTATE_H_ */
diff --git a/core/java/android/database/MatrixCursor.java b/core/java/android/database/MatrixCursor.java
index 5c1b968..6e68b6b 100644
--- a/core/java/android/database/MatrixCursor.java
+++ b/core/java/android/database/MatrixCursor.java
@@ -272,6 +272,12 @@
}
@Override
+ public byte[] getBlob(int column) {
+ Object value = get(column);
+ return (byte[]) value;
+ }
+
+ @Override
public int getType(int column) {
return DatabaseUtils.getTypeOfObject(get(column));
}
diff --git a/core/java/android/hardware/Camera.java b/core/java/android/hardware/Camera.java
index 7d67e11..a168260 100644
--- a/core/java/android/hardware/Camera.java
+++ b/core/java/android/hardware/Camera.java
@@ -16,6 +16,8 @@
package android.hardware;
+import android.annotation.SdkConstant;
+import android.annotation.SdkConstant.SdkConstantType;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.SurfaceTexture;
@@ -142,6 +144,22 @@
private boolean mWithBuffer;
/**
+     * Broadcast Action: A new picture was taken by the camera, and the entry of
+     * the picture has been added to the media store.
+     * {@link android.content.Intent#getData} is the URI of the picture.
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_NEW_PICTURE = "android.hardware.action.NEW_PICTURE";
+
+ /**
+     * Broadcast Action: A new video was recorded by the camera, and the entry
+     * of the video has been added to the media store.
+     * {@link android.content.Intent#getData} is the URI of the video.
+ */
+ @SdkConstant(SdkConstantType.BROADCAST_INTENT_ACTION)
+ public static final String ACTION_NEW_VIDEO = "android.hardware.action.NEW_VIDEO";
+
+ /**
* Returns the number of physical cameras available on this device.
*/
public native static int getNumberOfCameras();
diff --git a/core/java/android/os/RecoverySystem.java b/core/java/android/os/RecoverySystem.java
index ae605fb..73e8d98 100644
--- a/core/java/android/os/RecoverySystem.java
+++ b/core/java/android/os/RecoverySystem.java
@@ -357,20 +357,11 @@
}
/**
- * Reboot into the recovery system to wipe the /data partition and toggle
- * Encrypted File Systems on/off.
- * @param extras to add to the RECOVERY_COMPLETED intent after rebooting.
+ * Reboot into the recovery system to wipe the /cache partition.
* @throws IOException if something goes wrong.
- *
- * @hide
*/
- public static void rebootToggleEFS(Context context, boolean efsEnabled)
- throws IOException {
- if (efsEnabled) {
- bootCommand(context, "--set_encrypted_filesystem=on");
- } else {
- bootCommand(context, "--set_encrypted_filesystem=off");
- }
+ public static void rebootWipeCache(Context context) throws IOException {
+ bootCommand(context, "--wipe_cache");
}
/**
diff --git a/core/java/android/util/Patterns.java b/core/java/android/util/Patterns.java
index 3bcd266..152827d 100644
--- a/core/java/android/util/Patterns.java
+++ b/core/java/android/util/Patterns.java
@@ -25,9 +25,9 @@
public class Patterns {
/**
* Regular expression to match all IANA top-level domains.
- * List accurate as of 2010/05/06. List taken from:
+ * List accurate as of 2011/07/18. List taken from:
* http://data.iana.org/TLD/tlds-alpha-by-domain.txt
- * This pattern is auto-generated by frameworks/base/common/tools/make-iana-tld-pattern.py
+ * This pattern is auto-generated by frameworks/ex/common/tools/make-iana-tld-pattern.py
*/
public static final String TOP_LEVEL_DOMAIN_STR =
"((aero|arpa|asia|a[cdefgilmnoqrstuwxz])"
@@ -53,7 +53,7 @@
+ "|u[agksyz]"
+ "|v[aceginu]"
+ "|w[fs]"
- + "|(xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)"
+ + "|(\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)"
+ "|y[et]"
+ "|z[amw])";
@@ -65,9 +65,9 @@
/**
* Regular expression to match all IANA top-level domains for WEB_URL.
- * List accurate as of 2010/05/06. List taken from:
+ * List accurate as of 2011/07/18. List taken from:
* http://data.iana.org/TLD/tlds-alpha-by-domain.txt
- * This pattern is auto-generated by frameworks/base/common/tools/make-iana-tld-pattern.py
+ * This pattern is auto-generated by frameworks/ex/common/tools/make-iana-tld-pattern.py
*/
public static final String TOP_LEVEL_DOMAIN_STR_FOR_WEB_URL =
"(?:"
@@ -94,7 +94,7 @@
+ "|u[agksyz]"
+ "|v[aceginu]"
+ "|w[fs]"
- + "|(?:xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-80akhbyknj4f|xn\\-\\-9t4b11yi5a|xn\\-\\-deba0ad|xn\\-\\-g6w251d|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-mgbaam7a8h|xn\\-\\-mgberp4a5d4ar|xn\\-\\-wgbh1c|xn\\-\\-zckzah)"
+ + "|(?:\u03b4\u03bf\u03ba\u03b9\u03bc\u03ae|\u0438\u0441\u043f\u044b\u0442\u0430\u043d\u0438\u0435|\u0440\u0444|\u0441\u0440\u0431|\u05d8\u05e2\u05e1\u05d8|\u0622\u0632\u0645\u0627\u06cc\u0634\u06cc|\u0625\u062e\u062a\u0628\u0627\u0631|\u0627\u0644\u0627\u0631\u062f\u0646|\u0627\u0644\u062c\u0632\u0627\u0626\u0631|\u0627\u0644\u0633\u0639\u0648\u062f\u064a\u0629|\u0627\u0644\u0645\u063a\u0631\u0628|\u0627\u0645\u0627\u0631\u0627\u062a|\u0628\u06be\u0627\u0631\u062a|\u062a\u0648\u0646\u0633|\u0633\u0648\u0631\u064a\u0629|\u0641\u0644\u0633\u0637\u064a\u0646|\u0642\u0637\u0631|\u0645\u0635\u0631|\u092a\u0930\u0940\u0915\u094d\u0937\u093e|\u092d\u093e\u0930\u0924|\u09ad\u09be\u09b0\u09a4|\u0a2d\u0a3e\u0a30\u0a24|\u0aad\u0abe\u0ab0\u0aa4|\u0b87\u0ba8\u0bcd\u0ba4\u0bbf\u0baf\u0bbe|\u0b87\u0bb2\u0b99\u0bcd\u0b95\u0bc8|\u0b9a\u0bbf\u0b99\u0bcd\u0b95\u0baa\u0bcd\u0baa\u0bc2\u0bb0\u0bcd|\u0baa\u0bb0\u0bbf\u0b9f\u0bcd\u0b9a\u0bc8|\u0c2d\u0c3e\u0c30\u0c24\u0c4d|\u0dbd\u0d82\u0d9a\u0dcf|\u0e44\u0e17\u0e22|\u30c6\u30b9\u30c8|\u4e2d\u56fd|\u4e2d\u570b|\u53f0\u6e7e|\u53f0\u7063|\u65b0\u52a0\u5761|\u6d4b\u8bd5|\u6e2c\u8a66|\u9999\u6e2f|\ud14c\uc2a4\ud2b8|\ud55c\uad6d|xn\\-\\-0zwm56d|xn\\-\\-11b5bs3a9aj6g|xn\\-\\-3e0b707e|xn\\-\\-45brj9c|xn\\-\\-80akhbyknj4f|xn\\-\\-90a3ac|xn\\-\\-9t4b11yi5a|xn\\-\\-clchc0ea0b2g2a9gcd|xn\\-\\-deba0ad|xn\\-\\-fiqs8s|xn\\-\\-fiqz9s|xn\\-\\-fpcrj9c3d|xn\\-\\-fzc2c9e2c|xn\\-\\-g6w251d|xn\\-\\-gecrj9c|xn\\-\\-h2brj9c|xn\\-\\-hgbk6aj7f53bba|xn\\-\\-hlcj6aya9esc7a|xn\\-\\-j6w193g|xn\\-\\-jxalpdlp|xn\\-\\-kgbechtv|xn\\-\\-kprw13d|xn\\-\\-kpry57d|xn\\-\\-lgbbat1ad8j|xn\\-\\-mgbaam7a8h|xn\\-\\-mgbayh7gpa|xn\\-\\-mgbbh1a71e|xn\\-\\-mgbc0a9azcg|xn\\-\\-mgberp4a5d4ar|xn\\-\\-o3cw4h|xn\\-\\-ogbpf8fl|xn\\-\\-p1ai|xn\\-\\-pgbs0dh|xn\\-\\-s9brj9c|xn\\-\\-wgbh1c|xn\\-\\-wgbl6a|xn\\-\\-xkc2al3hye2a|xn\\-\\-xkc2dl3a5ee0h|xn\\-\\-yfro4i67o|xn\\-\\-ygbi2ammx|xn\\-\\-zckzah|xxx)"
+ "|y[et]"
+ "|z[amw]))";
diff --git a/core/java/android/view/Display.java b/core/java/android/view/Display.java
index 2be5a49..5ab2024 100644
--- a/core/java/android/view/Display.java
+++ b/core/java/android/view/Display.java
@@ -118,6 +118,7 @@
} else {
// This is just for boot-strapping, initializing the
// system process before the window manager is up.
+ outSize.x = getRealWidth();
outSize.y = getRealHeight();
}
if (DEBUG_COMPAT && doCompat) Slog.v(TAG, "Returning display size: " + outSize);
diff --git a/core/java/android/view/HardwareRenderer.java b/core/java/android/view/HardwareRenderer.java
index 9a2564f..b865b50 100644
--- a/core/java/android/view/HardwareRenderer.java
+++ b/core/java/android/view/HardwareRenderer.java
@@ -325,12 +325,15 @@
private static final int SURFACE_STATE_SUCCESS = 1;
private static final int SURFACE_STATE_UPDATED = 2;
- static EGLContext sEglContext;
static EGL10 sEgl;
static EGLDisplay sEglDisplay;
static EGLConfig sEglConfig;
+ static final Object[] sEglLock = new Object[0];
- private static Thread sEglThread;
+ static final ThreadLocal<EGLContext> sEglContextStorage = new ThreadLocal<EGLContext>();
+
+ EGLContext mEglContext;
+ Thread mEglThread;
EGLSurface mEglSurface;
@@ -355,7 +358,7 @@
final boolean mTranslucent;
private boolean mDestroyed;
-
+
private final Rect mRedrawClip = new Rect();
GlRenderer(int glVersion, boolean translucent) {
@@ -487,45 +490,48 @@
abstract GLES20Canvas createCanvas();
void initializeEgl() {
- if (sEglContext != null) return;
-
- sEglThread = Thread.currentThread();
- sEgl = (EGL10) EGLContext.getEGL();
-
- // Get to the default display.
- sEglDisplay = sEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
-
- if (sEglDisplay == EGL10.EGL_NO_DISPLAY) {
- throw new RuntimeException("eglGetDisplay failed "
- + getEGLErrorString(sEgl.eglGetError()));
- }
-
- // We can now initialize EGL for that display
- int[] version = new int[2];
- if (!sEgl.eglInitialize(sEglDisplay, version)) {
- throw new RuntimeException("eglInitialize failed " +
- getEGLErrorString(sEgl.eglGetError()));
- }
-
- sEglConfig = chooseEglConfig();
- if (sEglConfig == null) {
- // We tried to use EGL_SWAP_BEHAVIOR_PRESERVED_BIT, try again without
- if (sDirtyRegions) {
- sDirtyRegions = false;
+ synchronized (sEglLock) {
+ if (sEgl == null && sEglConfig == null) {
+ sEgl = (EGL10) EGLContext.getEGL();
+
+ // Get to the default display.
+ sEglDisplay = sEgl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
+
+ if (sEglDisplay == EGL10.EGL_NO_DISPLAY) {
+ throw new RuntimeException("eglGetDisplay failed "
+ + getEGLErrorString(sEgl.eglGetError()));
+ }
+
+ // We can now initialize EGL for that display
+ int[] version = new int[2];
+ if (!sEgl.eglInitialize(sEglDisplay, version)) {
+ throw new RuntimeException("eglInitialize failed " +
+ getEGLErrorString(sEgl.eglGetError()));
+ }
+
sEglConfig = chooseEglConfig();
if (sEglConfig == null) {
- throw new RuntimeException("eglConfig not initialized");
+ // We tried to use EGL_SWAP_BEHAVIOR_PRESERVED_BIT, try again without
+ if (sDirtyRegions) {
+ sDirtyRegions = false;
+ sEglConfig = chooseEglConfig();
+ if (sEglConfig == null) {
+ throw new RuntimeException("eglConfig not initialized");
+ }
+ } else {
+ throw new RuntimeException("eglConfig not initialized");
+ }
}
- } else {
- throw new RuntimeException("eglConfig not initialized");
}
}
-
- /*
- * Create an EGL context. We want to do this as rarely as we can, because an
- * EGL context is a somewhat heavy object.
- */
- sEglContext = createContext(sEgl, sEglDisplay, sEglConfig);
+
+ mEglContext = sEglContextStorage.get();
+ mEglThread = Thread.currentThread();
+
+ if (mEglContext == null) {
+ mEglContext = createContext(sEgl, sEglDisplay, sEglConfig);
+ sEglContextStorage.set(mEglContext);
+ }
}
private EGLConfig chooseEglConfig() {
@@ -554,7 +560,7 @@
if (sEglConfig == null) {
throw new RuntimeException("eglConfig not initialized");
}
- if (Thread.currentThread() != sEglThread) {
+ if (Thread.currentThread() != mEglThread) {
throw new IllegalStateException("HardwareRenderer cannot be used "
+ "from multiple threads");
}
@@ -590,7 +596,7 @@
* Before we can issue GL commands, we need to make sure
* the context is current and bound to a surface.
*/
- if (!sEgl.eglMakeCurrent(sEglDisplay, mEglSurface, mEglSurface, sEglContext)) {
+ if (!sEgl.eglMakeCurrent(sEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
throw new Surface.OutOfResourcesException("eglMakeCurrent failed "
+ getEGLErrorString(sEgl.eglGetError()));
}
@@ -611,7 +617,7 @@
mDirtyRegionsEnabled = GLES20Canvas.isBackBufferPreserved();
}
- return sEglContext.getGL();
+ return mEglContext.getGL();
}
EGLContext createContext(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig) {
@@ -752,22 +758,22 @@
}
/**
- * Ensures the currnet EGL context is the one we expect.
+ * Ensures the current EGL context is the one we expect.
*
* @return {@link #SURFACE_STATE_ERROR} if the correct EGL context cannot be made current,
* {@link #SURFACE_STATE_UPDATED} if the EGL context was changed or
* {@link #SURFACE_STATE_SUCCESS} if the EGL context was the correct one
*/
private int checkCurrent() {
- if (sEglThread != Thread.currentThread()) {
+ if (mEglThread != Thread.currentThread()) {
throw new IllegalStateException("Hardware acceleration can only be used with a " +
- "single UI thread.\nOriginal thread: " + sEglThread + "\n" +
+ "single UI thread.\nOriginal thread: " + mEglThread + "\n" +
"Current thread: " + Thread.currentThread());
}
- if (!sEglContext.equals(sEgl.eglGetCurrentContext()) ||
+ if (!mEglContext.equals(sEgl.eglGetCurrentContext()) ||
!mEglSurface.equals(sEgl.eglGetCurrentSurface(EGL10.EGL_DRAW))) {
- if (!sEgl.eglMakeCurrent(sEglDisplay, mEglSurface, mEglSurface, sEglContext)) {
+ if (!sEgl.eglMakeCurrent(sEglDisplay, mEglSurface, mEglSurface, mEglContext)) {
fallback(true);
Log.e(LOG_TAG, "eglMakeCurrent failed " +
getEGLErrorString(sEgl.eglGetError()));
diff --git a/core/java/android/view/ViewGroup.java b/core/java/android/view/ViewGroup.java
index a6c158d..54fee3c 100644
--- a/core/java/android/view/ViewGroup.java
+++ b/core/java/android/view/ViewGroup.java
@@ -843,7 +843,7 @@
* @param child
* @param visibility
*/
- void onChildVisibilityChanged(View child, int visibility) {
+ protected void onChildVisibilityChanged(View child, int visibility) {
if (mTransition != null) {
if (visibility == VISIBLE) {
mTransition.showChild(this, child);
@@ -3964,7 +3964,8 @@
final int left = mLeft;
final int top = mTop;
- if (dirty.intersect(0, 0, mRight - left, mBottom - top) ||
+ if ((mGroupFlags & FLAG_CLIP_CHILDREN) != FLAG_CLIP_CHILDREN ||
+ dirty.intersect(0, 0, mRight - left, mBottom - top) ||
(mPrivateFlags & DRAW_ANIMATION) == DRAW_ANIMATION) {
mPrivateFlags &= ~DRAWING_CACHE_VALID;
@@ -3982,8 +3983,12 @@
location[CHILD_LEFT_INDEX] = mLeft;
location[CHILD_TOP_INDEX] = mTop;
-
- dirty.set(0, 0, mRight - mLeft, mBottom - mTop);
+ if ((mGroupFlags & FLAG_CLIP_CHILDREN) == FLAG_CLIP_CHILDREN) {
+ dirty.set(0, 0, mRight - mLeft, mBottom - mTop);
+ } else {
+ // in case the dirty rect extends outside the bounds of this container
+ dirty.union(0, 0, mRight - mLeft, mBottom - mTop);
+ }
if (mLayerType != LAYER_TYPE_NONE) {
mLocalDirtyRect.union(dirty);
diff --git a/core/java/android/webkit/WebViewCore.java b/core/java/android/webkit/WebViewCore.java
index 5414b79..d7a2526 100644
--- a/core/java/android/webkit/WebViewCore.java
+++ b/core/java/android/webkit/WebViewCore.java
@@ -2344,7 +2344,9 @@
webViewWidth = mWebView.getViewWidth();
viewportWidth = (int) (webViewWidth / adjust);
if (viewportWidth == 0) {
- Log.w(LOGTAG, "Can't get the viewWidth after the first layout");
+ if (DebugFlags.WEB_VIEW_CORE) {
+ Log.v(LOGTAG, "Can't get the viewWidth yet");
+ }
}
} else {
webViewWidth = Math.round(viewportWidth * mCurrentViewScale);
diff --git a/core/java/android/widget/Gallery.java b/core/java/android/widget/Gallery.java
index 0ffd087..3f5b571c 100644
--- a/core/java/android/widget/Gallery.java
+++ b/core/java/android/widget/Gallery.java
@@ -16,28 +16,28 @@
package android.widget;
-import com.android.internal.R;
-
import android.annotation.Widget;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
+import android.view.ContextMenu.ContextMenuInfo;
import android.view.GestureDetector;
import android.view.Gravity;
import android.view.HapticFeedbackConstants;
import android.view.KeyEvent;
import android.view.MotionEvent;
+import android.view.SoundEffectConstants;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
-import android.view.SoundEffectConstants;
-import android.view.ContextMenu.ContextMenuInfo;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.animation.Transformation;
+import com.android.internal.R;
+
/**
* A view that shows items in a center-locked, horizontally scrolling list.
* <p>
@@ -122,6 +122,7 @@
* in the future. It will also trigger a selection changed.
*/
private Runnable mDisableSuppressSelectionChangedRunnable = new Runnable() {
+ @Override
public void run() {
mSuppressSelectionChanged = false;
selectionChanged();
@@ -171,6 +172,12 @@
* drag sends many onScrolls).
*/
private boolean mIsFirstScroll;
+
+ /**
+ * If true, mFirstPosition is the position of the rightmost child, and
+ * the children are ordered right to left.
+ */
+ private boolean mIsRtl = true;
public Gallery(Context context) {
this(context, null);
@@ -418,7 +425,7 @@
}
int getLimitedMotionScrollAmount(boolean motionToLeft, int deltaX) {
- int extremeItemPosition = motionToLeft ? mItemCount - 1 : 0;
+ int extremeItemPosition = motionToLeft != mIsRtl ? mItemCount - 1 : 0;
View extremeChild = getChildAt(extremeItemPosition - mFirstPosition);
if (extremeChild == null) {
@@ -490,31 +497,40 @@
if (toLeft) {
final int galleryLeft = mPaddingLeft;
for (int i = 0; i < numChildren; i++) {
- final View child = getChildAt(i);
+ int n = mIsRtl ? (numChildren - 1 - i) : i;
+ final View child = getChildAt(n);
if (child.getRight() >= galleryLeft) {
break;
} else {
+ start = n;
count++;
- mRecycler.put(firstPosition + i, child);
+ mRecycler.put(firstPosition + n, child);
}
}
+ if (!mIsRtl) {
+ start = 0;
+ }
} else {
final int galleryRight = getWidth() - mPaddingRight;
for (int i = numChildren - 1; i >= 0; i--) {
- final View child = getChildAt(i);
+ int n = mIsRtl ? numChildren - 1 - i : i;
+ final View child = getChildAt(n);
if (child.getLeft() <= galleryRight) {
break;
} else {
- start = i;
+ start = n;
count++;
- mRecycler.put(firstPosition + i, child);
+ mRecycler.put(firstPosition + n, child);
}
}
+ if (mIsRtl) {
+ start = 0;
+ }
}
detachViewsFromParent(start, count);
- if (toLeft) {
+ if (toLeft != mIsRtl) {
mFirstPosition += count;
}
}
@@ -614,6 +630,8 @@
@Override
void layout(int delta, boolean animate) {
+ mIsRtl = isLayoutRtl();
+
int childrenLeft = mSpinnerPadding.left;
int childrenWidth = mRight - mLeft - mSpinnerPadding.left - mSpinnerPadding.right;
@@ -676,6 +694,45 @@
}
private void fillToGalleryLeft() {
+ if (mIsRtl) {
+ fillToGalleryLeftRtl();
+ } else {
+ fillToGalleryLeftLtr();
+ }
+ }
+
+ private void fillToGalleryLeftRtl() {
+ int itemSpacing = mSpacing;
+ int galleryLeft = mPaddingLeft;
+ int numChildren = getChildCount();
+ int numItems = mItemCount;
+
+ // Set state for initial iteration
+ View prevIterationView = getChildAt(numChildren - 1);
+ int curPosition;
+ int curRightEdge;
+
+ if (prevIterationView != null) {
+ curPosition = mFirstPosition + numChildren;
+ curRightEdge = prevIterationView.getLeft() - itemSpacing;
+ } else {
+ // No children available!
+ mFirstPosition = curPosition = mItemCount - 1;
+ curRightEdge = mRight - mLeft - mPaddingRight;
+ mShouldStopFling = true;
+ }
+
+ while (curRightEdge > galleryLeft && curPosition < mItemCount) {
+ prevIterationView = makeAndAddView(curPosition, curPosition - mSelectedPosition,
+ curRightEdge, false);
+
+ // Set state for next iteration
+ curRightEdge = prevIterationView.getLeft() - itemSpacing;
+ curPosition++;
+ }
+ }
+
+ private void fillToGalleryLeftLtr() {
int itemSpacing = mSpacing;
int galleryLeft = mPaddingLeft;
@@ -708,6 +765,45 @@
}
private void fillToGalleryRight() {
+ if (mIsRtl) {
+ fillToGalleryRightRtl();
+ } else {
+ fillToGalleryRightLtr();
+ }
+ }
+
+ private void fillToGalleryRightRtl() {
+ int itemSpacing = mSpacing;
+ int galleryRight = mRight - mLeft - mPaddingRight;
+
+ // Set state for initial iteration
+ View prevIterationView = getChildAt(0);
+ int curPosition;
+ int curLeftEdge;
+
+ if (prevIterationView != null) {
+            curPosition = mFirstPosition - 1;
+ curLeftEdge = prevIterationView.getRight() + itemSpacing;
+ } else {
+ curPosition = 0;
+ curLeftEdge = mPaddingLeft;
+ mShouldStopFling = true;
+ }
+
+ while (curLeftEdge < galleryRight && curPosition >= 0) {
+ prevIterationView = makeAndAddView(curPosition, curPosition - mSelectedPosition,
+ curLeftEdge, true);
+
+ // Remember some state
+ mFirstPosition = curPosition;
+
+ // Set state for next iteration
+ curLeftEdge = prevIterationView.getRight() + itemSpacing;
+ curPosition--;
+ }
+ }
+
+ private void fillToGalleryRightLtr() {
int itemSpacing = mSpacing;
int galleryRight = mRight - mLeft - mPaddingRight;
int numChildren = getChildCount();
@@ -745,18 +841,16 @@
*
* @param position Position in the gallery for the view to obtain
* @param offset Offset from the selected position
- * @param x X-coordintate indicating where this view should be placed. This
+ * @param x X-coordinate indicating where this view should be placed. This
* will either be the left or right edge of the view, depending on
- * the fromLeft paramter
- * @param fromLeft Are we posiitoning views based on the left edge? (i.e.,
+ * the fromLeft parameter
+ * @param fromLeft Are we positioning views based on the left edge? (i.e.,
* building from left to right)?
* @return A view that has been added to the gallery
*/
- private View makeAndAddView(int position, int offset, int x,
- boolean fromLeft) {
+ private View makeAndAddView(int position, int offset, int x, boolean fromLeft) {
View child;
-
if (!mDataChanged) {
child = mRecycler.get(position);
if (child != null) {
@@ -786,27 +880,26 @@
/**
* Helper for makeAndAddView to set the position of a view and fill out its
- * layout paramters.
+ * layout parameters.
*
* @param child The view to position
* @param offset Offset from the selected position
- * @param x X-coordintate indicating where this view should be placed. This
+ * @param x X-coordinate indicating where this view should be placed. This
* will either be the left or right edge of the view, depending on
- * the fromLeft paramter
- * @param fromLeft Are we posiitoning views based on the left edge? (i.e.,
+ * the fromLeft parameter
+ * @param fromLeft Are we positioning views based on the left edge? (i.e.,
* building from left to right)?
*/
private void setUpChild(View child, int offset, int x, boolean fromLeft) {
// Respect layout params that are already in the view. Otherwise
// make some up...
- Gallery.LayoutParams lp = (Gallery.LayoutParams)
- child.getLayoutParams();
+ Gallery.LayoutParams lp = (Gallery.LayoutParams) child.getLayoutParams();
if (lp == null) {
lp = (Gallery.LayoutParams) generateDefaultLayoutParams();
}
- addViewInLayout(child, fromLeft ? -1 : 0, lp);
+ addViewInLayout(child, fromLeft != mIsRtl ? -1 : 0, lp);
child.setSelected(offset == 0);
@@ -883,9 +976,7 @@
return retValue;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onSingleTapUp(MotionEvent e) {
if (mDownTouchPosition >= 0) {
@@ -905,9 +996,7 @@
return false;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onFling(MotionEvent e1, MotionEvent e2, float velocityX, float velocityY) {
if (!mShouldCallbackDuringFling) {
@@ -926,9 +1015,7 @@
return true;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onScroll(MotionEvent e1, MotionEvent e2, float distanceX, float distanceY) {
if (localLOGV) Log.v(TAG, String.valueOf(e2.getX() - e1.getX()));
@@ -967,9 +1054,7 @@
return true;
}
- /**
- * {@inheritDoc}
- */
+ @Override
public boolean onDown(MotionEvent e) {
// Kill any existing fling/scroll
@@ -1009,9 +1094,7 @@
onUp();
}
- /**
- * {@inheritDoc}
- */
+ @Override
public void onLongPress(MotionEvent e) {
if (mDownTouchPosition < 0) {
@@ -1025,9 +1108,7 @@
// Unused methods from GestureDetector.OnGestureListener below
- /**
- * {@inheritDoc}
- */
+ @Override
public void onShowPress(MotionEvent e) {
}
@@ -1164,6 +1245,7 @@
dispatchPress(mSelectedChild);
postDelayed(new Runnable() {
+ @Override
public void run() {
dispatchUnpress();
}
@@ -1278,10 +1360,10 @@
// Draw the selected child last
return selectedIndex;
} else if (i >= selectedIndex) {
- // Move the children to the right of the selected child earlier one
+            // Move the children after the selected child one position earlier
return i + 1;
} else {
- // Keep the children to the left of the selected child the same
+ // Keep the children before the selected child the same
return i;
}
}
@@ -1306,7 +1388,6 @@
* Responsible for fling behavior. Use {@link #startUsingVelocity(int)} to
* initiate a fling. Each frame of the fling is handled in {@link #run()}.
* A FlingRunnable will keep re-posting itself until the fling is done.
- *
*/
private class FlingRunnable implements Runnable {
/**
@@ -1365,6 +1446,7 @@
if (scrollIntoSlots) scrollIntoSlots();
}
+ @Override
public void run() {
if (mItemCount == 0) {
@@ -1384,15 +1466,17 @@
// Pretend that each frame of a fling scroll is a touch scroll
if (delta > 0) {
- // Moving towards the left. Use first view as mDownTouchPosition
- mDownTouchPosition = mFirstPosition;
+ // Moving towards the left. Use leftmost view as mDownTouchPosition
+ mDownTouchPosition = mIsRtl ? (mFirstPosition + getChildCount() - 1) :
+ mFirstPosition;
// Don't fling more than 1 screen
delta = Math.min(getWidth() - mPaddingLeft - mPaddingRight - 1, delta);
} else {
- // Moving towards the right. Use last view as mDownTouchPosition
+ // Moving towards the right. Use rightmost view as mDownTouchPosition
int offsetToLast = getChildCount() - 1;
- mDownTouchPosition = mFirstPosition + offsetToLast;
+ mDownTouchPosition = mIsRtl ? mFirstPosition :
+ (mFirstPosition + getChildCount() - 1);
// Don't fling more than 1 screen
delta = Math.max(-(getWidth() - mPaddingRight - mPaddingLeft - 1), delta);
@@ -1414,7 +1498,6 @@
* Gallery extends LayoutParams to provide a place to hold current
* Transformation information along with previous position/transformation
* info.
- *
*/
public static class LayoutParams extends ViewGroup.LayoutParams {
public LayoutParams(Context c, AttributeSet attrs) {
diff --git a/core/java/android/widget/GridLayout.java b/core/java/android/widget/GridLayout.java
index 6df8efb..5747fd3 100644
--- a/core/java/android/widget/GridLayout.java
+++ b/core/java/android/widget/GridLayout.java
@@ -757,18 +757,36 @@
// Add/remove
+ /**
+ * @hide
+ */
@Override
protected void onViewAdded(View child) {
super.onViewAdded(child);
invalidateStructure();
}
+ /**
+ * @hide
+ */
@Override
protected void onViewRemoved(View child) {
super.onViewRemoved(child);
invalidateStructure();
}
+ /**
+ * We need to call invalidateStructure() when a child's GONE flag changes state.
+ * This implementation is a catch-all, invalidating on any change in the visibility flags.
+ *
+ * @hide
+ */
+ @Override
+ protected void onChildVisibilityChanged(View child, int visibility) {
+ super.onChildVisibilityChanged(child, visibility);
+ invalidateStructure();
+ }
+
// Measurement
private boolean isGone(View c) {
diff --git a/core/java/android/widget/TextView.java b/core/java/android/widget/TextView.java
index 66a07d3..2591cb3 100644
--- a/core/java/android/widget/TextView.java
+++ b/core/java/android/widget/TextView.java
@@ -362,7 +362,7 @@
INHERIT, GRAVITY, TEXT_START, TEXT_END, CENTER, VIEW_START, VIEW_END;
}
- private boolean bResolvedDrawables = false;
+ private boolean mResolvedDrawables = false;
/*
* Kick-start the font cache for the zygote process (to pay the cost of
@@ -10511,7 +10511,7 @@
*/
protected void resolveDrawables() {
// No need to resolve twice
- if (bResolvedDrawables) {
+ if (mResolvedDrawables) {
return;
}
// No drawable to resolve
@@ -10520,7 +10520,7 @@
}
// No relative drawable to resolve
if (mDrawables.mDrawableStart == null && mDrawables.mDrawableEnd == null) {
- bResolvedDrawables = true;
+ mResolvedDrawables = true;
return;
}
@@ -10557,11 +10557,11 @@
}
break;
}
- bResolvedDrawables = true;
+ mResolvedDrawables = true;
}
protected void resetResolvedDrawables() {
- bResolvedDrawables = false;
+ mResolvedDrawables = false;
}
@ViewDebug.ExportedProperty(category = "text")
diff --git a/core/res/res/layout-sw600dp/preference_list_content.xml b/core/res/res/layout-sw600dp/preference_list_content.xml
index a5320a7..5b67d71 100644
--- a/core/res/res/layout-sw600dp/preference_list_content.xml
+++ b/core/res/res/layout-sw600dp/preference_list_content.xml
@@ -46,7 +46,6 @@
android:layout_weight="1"
android:paddingTop="16dp"
android:paddingBottom="16dp"
-
android:drawSelectorOnTop="false"
android:cacheColorHint="@android:color/transparent"
android:listPreferredItemHeight="48dp"
diff --git a/core/res/res/values-land/dimens.xml b/core/res/res/values-land/dimens.xml
index ec2313c..388eb38 100644
--- a/core/res/res/values-land/dimens.xml
+++ b/core/res/res/values-land/dimens.xml
@@ -25,8 +25,6 @@
<dimen name="password_keyboard_key_height_numeric">60dip</dimen>
<!-- Default correction for the space key in the password keyboard -->
<dimen name="password_keyboard_spacebar_vertical_correction">2dip</dimen>
- <dimen name="preference_screen_side_margin">16dp</dimen>
- <dimen name="preference_screen_side_margin_negative">-20dp</dimen>
<dimen name="preference_widget_width">72dp</dimen>
<!-- Default height of an action bar. -->
diff --git a/core/res/res/values-sw600dp/dimens.xml b/core/res/res/values-sw600dp/dimens.xml
index 17bf561..553632b 100644
--- a/core/res/res/values-sw600dp/dimens.xml
+++ b/core/res/res/values-sw600dp/dimens.xml
@@ -45,6 +45,13 @@
<dimen name="keyguard_pattern_unlock_status_line_font_size">14sp</dimen>
<!-- Preference activity, vertical padding for the header list -->
- <dimen name="reference_screen_header_vertical_padding">16dp</dimen>
+ <dimen name="preference_screen_header_vertical_padding">16dp</dimen>
+
+ <!-- Reduce the margin when using dual pane -->
+ <!-- Preference activity side margins -->
+ <dimen name="preference_screen_side_margin">0dp</dimen>
+ <!-- Preference activity side margins negative-->
+ <dimen name="preference_screen_side_margin_negative">-4dp</dimen>
+
</resources>
diff --git a/core/res/res/values-w1024dp/dimens.xml b/core/res/res/values-w1024dp/dimens.xml
new file mode 100644
index 0000000..4d6a4da
--- /dev/null
+++ b/core/res/res/values-w1024dp/dimens.xml
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<resources>
+    <!-- Increase size (used to be 0 for dual pane) -->
+ <!-- Preference activity side margins -->
+ <dimen name="preference_screen_side_margin">48dp</dimen>
+ <!-- Preference activity side margins negative-->
+ <dimen name="preference_screen_side_margin_negative">-52dp</dimen>
+</resources>
diff --git a/core/res/res/values-w1280dp/dimens.xml b/core/res/res/values-w1280dp/dimens.xml
index e67b3a9..28aea55 100644
--- a/core/res/res/values-w1280dp/dimens.xml
+++ b/core/res/res/values-w1280dp/dimens.xml
@@ -18,6 +18,7 @@
-->
<resources>
<dimen name="preference_screen_side_margin">96dp</dimen>
+    <!-- Compensate for double margin: preference_screen_side_margin + 4 (frame background shadow) = -preference_screen_side_margin_negative -->
<dimen name="preference_screen_side_margin_negative">-100dp</dimen>
<dimen name="preference_widget_width">64dp</dimen>
<!-- Preference fragment padding, bottom -->
diff --git a/core/res/res/values/dimens.xml b/core/res/res/values/dimens.xml
index 2ba4e66..0ed8076 100644
--- a/core/res/res/values/dimens.xml
+++ b/core/res/res/values/dimens.xml
@@ -67,9 +67,9 @@
<dimen name="multiwaveview_hit_radius">60dip</dimen>
<!-- Preference activity side margins -->
- <dimen name="preference_screen_side_margin">0dp</dimen>
+ <dimen name="preference_screen_side_margin">16dp</dimen>
<!-- Preference activity side margins negative-->
- <dimen name="preference_screen_side_margin_negative">0dp</dimen>
+ <dimen name="preference_screen_side_margin_negative">16dp</dimen>
<!-- Preference activity top margin -->
<dimen name="preference_screen_top_margin">0dp</dimen>
<!-- Preference activity bottom margin -->
diff --git a/core/tests/coretests/src/android/database/MatrixCursorTest.java b/core/tests/coretests/src/android/database/MatrixCursorTest.java
index cddc6c4..cdab638 100644
--- a/core/tests/coretests/src/android/database/MatrixCursorTest.java
+++ b/core/tests/coretests/src/android/database/MatrixCursorTest.java
@@ -16,6 +16,8 @@
package android.database;
+import android.test.MoreAsserts;
+
import junit.framework.TestCase;
import java.util.*;
@@ -33,6 +35,7 @@
cursor.moveToNext();
assertTrue(cursor.isNull(0));
assertNull(cursor.getString(0));
+ assertNull(cursor.getBlob(0));
assertEquals(0, cursor.getShort(0));
assertEquals(0, cursor.getInt(0));
assertEquals(0L, cursor.getLong(0));
@@ -49,7 +52,8 @@
.add(2)
.add(3)
.add(4)
- .add(5);
+ .add(5)
+ .add(new byte[] {(byte) 0xaa, (byte) 0x55});
cursor.moveToNext();
@@ -61,7 +65,8 @@
.add("2")
.add("3")
.add("4")
- .add("5");
+ .add("5")
+ .add(new byte[] {(byte) 0xaa, (byte) 0x55});
cursor.moveToNext();
checkValues(cursor);
@@ -73,7 +78,7 @@
public void testAddArray() {
MatrixCursor cursor = newMatrixCursor();
- cursor.addRow(new Object[] { "a", 1, 2, 3, 4, 5 });
+ cursor.addRow(new Object[] { "a", 1, 2, 3, 4, 5, new byte[] {(byte) 0xaa, (byte) 0x55} });
cursor.moveToNext();
checkValues(cursor);
@@ -86,7 +91,7 @@
public void testAddIterable() {
MatrixCursor cursor = newMatrixCursor();
- cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5));
+ cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5, new byte[] {(byte) 0xaa, (byte) 0x55}));
cursor.moveToNext();
checkValues(cursor);
@@ -96,7 +101,8 @@
} catch (IllegalArgumentException e) { /* expected */ }
try {
- cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5, "Too many!"));
+ cursor.addRow(Arrays.asList("a", 1, 2, 3, 4, 5,
+ new byte[] {(byte) 0xaa, (byte) 0x55}, "Too many!"));
fail();
} catch (IllegalArgumentException e) { /* expected */ }
}
@@ -105,7 +111,7 @@
MatrixCursor cursor = newMatrixCursor();
cursor.addRow(new NonIterableArrayList<Object>(
- Arrays.asList("a", 1, 2, 3, 4, 5)));
+ Arrays.asList("a", 1, 2, 3, 4, 5, new byte[] {(byte) 0xaa, (byte) 0x55})));
cursor.moveToNext();
checkValues(cursor);
@@ -116,7 +122,8 @@
try {
cursor.addRow(new NonIterableArrayList<Object>(
- Arrays.asList("a", 1, 2, 3, 4, 5, "Too many!")));
+ Arrays.asList("a", 1, 2, 3, 4, 5,
+ new byte[] {(byte) 0xaa, (byte) 0x55}, "Too many!")));
fail();
} catch (IllegalArgumentException e) { /* expected */ }
}
@@ -137,7 +144,7 @@
private MatrixCursor newMatrixCursor() {
return new MatrixCursor(new String[] {
- "string", "short", "int", "long", "float", "double" });
+ "string", "short", "int", "long", "float", "double", "blob" });
}
private void checkValues(MatrixCursor cursor) {
@@ -147,6 +154,7 @@
assertEquals(3, cursor.getLong(3));
assertEquals(4.0f, cursor.getFloat(4));
assertEquals(5.0D, cursor.getDouble(5));
+ MoreAsserts.assertEquals(new byte[] {(byte) 0xaa, (byte) 0x55}, cursor.getBlob(6));
}
}
diff --git a/core/tests/coretests/src/android/util/JsonReaderTest.java b/core/tests/coretests/src/android/util/JsonReaderTest.java
index 440aeb5..0b50af3 100644
--- a/core/tests/coretests/src/android/util/JsonReaderTest.java
+++ b/core/tests/coretests/src/android/util/JsonReaderTest.java
@@ -858,7 +858,7 @@
}
public void testFailWithPosition() throws IOException {
- testFailWithPosition("Expected literal value at line 6 column 3",
+ testFailWithPosition("Expected literal value at line 6 column 3",
"[\n\n\n\n\n0,}]");
}
diff --git a/core/tests/coretests/src/android/util/PatternsTest.java b/core/tests/coretests/src/android/util/PatternsTest.java
index aad3fe1..9519b9f 100644
--- a/core/tests/coretests/src/android/util/PatternsTest.java
+++ b/core/tests/coretests/src/android/util/PatternsTest.java
@@ -39,6 +39,10 @@
t = Patterns.TOP_LEVEL_DOMAIN.matcher("xn--0zwm56d").matches();
assertTrue("Missed valid TLD", t);
+        // One of the new internationalized top-level domains.
+ t = Patterns.TOP_LEVEL_DOMAIN.matcher("\uD55C\uAD6D").matches();
+ assertTrue("Missed valid TLD", t);
+
t = Patterns.TOP_LEVEL_DOMAIN.matcher("mem").matches();
assertFalse("Matched invalid TLD!", t);
@@ -80,6 +84,9 @@
assertTrue("Valid URL", t);
t = Patterns.WEB_URL.matcher("\uD604\uAE08\uC601\uC218\uC99D.kr").matches();
assertTrue("Valid URL", t);
+ // URL with international TLD.
+ t = Patterns.WEB_URL.matcher("\uB3C4\uBA54\uC778.\uD55C\uAD6D").matches();
+ assertTrue("Valid URL", t);
t = Patterns.WEB_URL.matcher("http://brainstormtech.blogs.fortune.cnn.com/2010/03/11/" +
"top-five-moments-from-eric-schmidt\u2019s-talk-in-abu-dhabi/").matches();
diff --git a/data/fonts/fallback_fonts.xml b/data/fonts/fallback_fonts.xml
index c0d9153..c56a056 100644
--- a/data/fonts/fallback_fonts.xml
+++ b/data/fonts/fallback_fonts.xml
@@ -30,6 +30,11 @@
</family>
<family>
<fileset>
+ <file>DroidSansEthiopic-Regular.ttf</file>
+ </fileset>
+ </family>
+ <family>
+ <fileset>
<file>DroidSansHebrew-Regular.ttf</file>
<file>DroidSansHebrew-Bold.ttf</file>
</fileset>
diff --git a/data/fonts/fonts.mk b/data/fonts/fonts.mk
index 57a1bab..59b4502 100644
--- a/data/fonts/fonts.mk
+++ b/data/fonts/fonts.mk
@@ -18,6 +18,7 @@
frameworks/base/data/fonts/DroidSans.ttf:system/fonts/DroidSans.ttf \
frameworks/base/data/fonts/DroidSans-Bold.ttf:system/fonts/DroidSans-Bold.ttf \
frameworks/base/data/fonts/DroidSansArabic.ttf:system/fonts/DroidSansArabic.ttf \
+ frameworks/base/data/fonts/DroidSansEthiopic-Regular.ttf:system/fonts/DroidSansEthiopic-Regular.ttf \
frameworks/base/data/fonts/DroidSansHebrew-Regular.ttf:system/fonts/DroidSansHebrew-Regular.ttf \
frameworks/base/data/fonts/DroidSansHebrew-Bold.ttf:system/fonts/DroidSansHebrew-Bold.ttf \
frameworks/base/data/fonts/DroidSansThai.ttf:system/fonts/DroidSansThai.ttf \
diff --git a/include/gui/ISurfaceTexture.h b/include/gui/ISurfaceTexture.h
index 5b5b731..37a9b4a 100644
--- a/include/gui/ISurfaceTexture.h
+++ b/include/gui/ISurfaceTexture.h
@@ -87,6 +87,7 @@
virtual status_t setCrop(const Rect& reg) = 0;
virtual status_t setTransform(uint32_t transform) = 0;
+ virtual status_t setScalingMode(int mode) = 0;
// getAllocator retrieves the binder object that must be referenced as long
// as the GraphicBuffers dequeued from this ISurfaceTexture are referenced.
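The new setScalingMode hook lets the producer side choose how queued buffers map onto the window: NATIVE_WINDOW_SCALING_MODE_FREEZE (the default, see SurfaceTexture.h below) or NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW. A minimal producer-side sketch, assuming "window" is an ANativeWindow backed by a SurfaceTextureClient and using the native_window_set_scaling_mode helper that the ACodec/OMXCodec hunks below also call:

    // Request that queued buffers be stretched to the window bounds instead
    // of being frozen at their queued size ("window" is an assumed variable,
    // not a name introduced by this change).
    status_t err = native_window_set_scaling_mode(window,
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (err != OK) {
        LOGE("native_window_set_scaling_mode failed: %s (%d)",
                strerror(-err), -err);
    }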
diff --git a/include/gui/SurfaceTexture.h b/include/gui/SurfaceTexture.h
index 4080f27..e46765e 100644
--- a/include/gui/SurfaceTexture.h
+++ b/include/gui/SurfaceTexture.h
@@ -88,6 +88,7 @@
virtual void cancelBuffer(int buf);
virtual status_t setCrop(const Rect& reg);
virtual status_t setTransform(uint32_t transform);
+ virtual status_t setScalingMode(int mode);
virtual int query(int what, int* value);
@@ -185,6 +186,9 @@
// getCurrentTransform returns the transform of the current buffer
uint32_t getCurrentTransform() const;
+ // getCurrentScalingMode returns the scaling mode of the current buffer
+ uint32_t getCurrentScalingMode() const;
+
// dump our state in a String
void dump(String8& result) const;
void dump(String8& result, const char* prefix, char* buffer, size_t SIZE) const;
@@ -220,6 +224,7 @@
mBufferState(BufferSlot::FREE),
mRequestBufferCalled(false),
mTransform(0),
+ mScalingMode(NATIVE_WINDOW_SCALING_MODE_FREEZE),
mTimestamp(0) {
mCrop.makeInvalid();
}
@@ -281,6 +286,11 @@
// slot.
uint32_t mTransform;
+ // mScalingMode is the current scaling mode for this buffer slot. This
+ // gets set to mNextScalingMode each time queueBuffer gets called for
+ // this slot.
+ uint32_t mScalingMode;
+
// mTimestamp is the current timestamp for this buffer slot. This gets
// set by queueBuffer each time this slot is queued.
int64_t mTimestamp;
@@ -337,20 +347,24 @@
sp<GraphicBuffer> mCurrentTextureBuf;
// mCurrentCrop is the crop rectangle that applies to the current texture.
- // It gets set to mLastQueuedCrop each time updateTexImage is called.
+ // It gets set each time updateTexImage is called.
Rect mCurrentCrop;
// mCurrentTransform is the transform identifier for the current texture. It
- // gets set to mLastQueuedTransform each time updateTexImage is called.
+ // gets set each time updateTexImage is called.
uint32_t mCurrentTransform;
+ // mCurrentScalingMode is the scaling mode for the current texture. It gets
+ // set each time updateTexImage is called.
+ uint32_t mCurrentScalingMode;
+
// mCurrentTransformMatrix is the transform matrix for the current texture.
// It gets computed by computeTransformMatrix each time updateTexImage is
// called.
float mCurrentTransformMatrix[16];
// mCurrentTimestamp is the timestamp for the current texture. It
- // gets set to mLastQueuedTimestamp each time updateTexImage is called.
+ // gets set each time updateTexImage is called.
int64_t mCurrentTimestamp;
// mNextCrop is the crop rectangle that will be used for the next buffer
@@ -361,6 +375,10 @@
// buffer that gets queued. It is set by calling setTransform.
uint32_t mNextTransform;
+ // mNextScalingMode is the scaling mode that will be used for the next
+ // buffers that get queued. It is set by calling setScalingMode.
+ int mNextScalingMode;
+
// mTexName is the name of the OpenGL texture to which streamed images will
// be bound when updateTexImage is called. It is set at construction time and
// can be changed with a call to setTexName.
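On the consumer side, the value latched from the queued slot can be read back after updateTexImage(); a short sketch, assuming st is a valid sp<SurfaceTexture> (a hypothetical name, not one introduced by this change):

    // After latching the newest buffer, query the scaling mode the producer
    // requested for it.
    if (st->updateTexImage() == OK) {
        uint32_t mode = st->getCurrentScalingMode();
        bool scaleToWindow =
                (mode == NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
        // scaleToWindow tells the consumer whether to stretch the buffer to
        // its own bounds or present it at the queued size.
    }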
diff --git a/include/gui/SurfaceTextureClient.h b/include/gui/SurfaceTextureClient.h
index cfe2aa1..76e7119 100644
--- a/include/gui/SurfaceTextureClient.h
+++ b/include/gui/SurfaceTextureClient.h
@@ -63,6 +63,7 @@
int dispatchSetBuffersGeometry(va_list args);
int dispatchSetBuffersDimensions(va_list args);
int dispatchSetBuffersFormat(va_list args);
+ int dispatchSetScalingMode(va_list args);
int dispatchSetBuffersTransform(va_list args);
int dispatchSetBuffersTimestamp(va_list args);
int dispatchSetCrop(va_list args);
@@ -84,6 +85,7 @@
virtual int setBufferCount(int bufferCount);
virtual int setBuffersDimensions(int w, int h);
virtual int setBuffersFormat(int format);
+ virtual int setScalingMode(int mode);
virtual int setBuffersTransform(int transform);
virtual int setBuffersTimestamp(int64_t timestamp);
virtual int setCrop(Rect const* rect);
diff --git a/libs/gui/ISurfaceTexture.cpp b/libs/gui/ISurfaceTexture.cpp
index 41434a4..b4b7492 100644
--- a/libs/gui/ISurfaceTexture.cpp
+++ b/libs/gui/ISurfaceTexture.cpp
@@ -43,6 +43,7 @@
SET_SYNCHRONOUS_MODE,
CONNECT,
DISCONNECT,
+ SET_SCALING_MODE,
};
@@ -130,6 +131,15 @@
return result;
}
+ virtual status_t setScalingMode(int mode) {
+ Parcel data, reply;
+ data.writeInterfaceToken(ISurfaceTexture::getInterfaceDescriptor());
+ data.writeInt32(mode);
+ remote()->transact(SET_SCALING_MODE, data, &reply);
+ status_t result = reply.readInt32();
+ return result;
+ }
+
virtual sp<IBinder> getAllocator() {
Parcel data, reply;
data.writeInterfaceToken(ISurfaceTexture::getInterfaceDescriptor());
@@ -244,6 +254,13 @@
reply->writeInt32(result);
return NO_ERROR;
} break;
+ case SET_SCALING_MODE: {
+ CHECK_INTERFACE(ISurfaceTexture, data, reply);
+ int mode = data.readInt32();
+ status_t result = setScalingMode(mode);
+ reply->writeInt32(result);
+ return NO_ERROR;
+ } break;
case GET_ALLOCATOR: {
CHECK_INTERFACE(ISurfaceTexture, data, reply);
sp<IBinder> result = getAllocator();
diff --git a/libs/gui/SurfaceTexture.cpp b/libs/gui/SurfaceTexture.cpp
index a12d40a..3ab6c79 100644
--- a/libs/gui/SurfaceTexture.cpp
+++ b/libs/gui/SurfaceTexture.cpp
@@ -90,6 +90,7 @@
mCurrentTransform(0),
mCurrentTimestamp(0),
mNextTransform(0),
+ mNextScalingMode(NATIVE_WINDOW_SCALING_MODE_FREEZE),
mTexName(tex),
mSynchronousMode(false),
mAllowSynchronousMode(allowSynchronousMode),
@@ -453,6 +454,7 @@
mSlots[buf].mBufferState = BufferSlot::QUEUED;
mSlots[buf].mCrop = mNextCrop;
mSlots[buf].mTransform = mNextTransform;
+ mSlots[buf].mScalingMode = mNextScalingMode;
mSlots[buf].mTimestamp = timestamp;
mDequeueCondition.signal();
} // scope for the lock
@@ -542,6 +544,22 @@
return err;
}
+status_t SurfaceTexture::setScalingMode(int mode) {
+ LOGV("SurfaceTexture::setScalingMode(%d)", mode);
+
+ switch (mode) {
+ case NATIVE_WINDOW_SCALING_MODE_FREEZE:
+ case NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW:
+ break;
+ default:
+ return BAD_VALUE;
+ }
+
+ Mutex::Autolock lock(mMutex);
+ mNextScalingMode = mode;
+ return OK;
+}
+
status_t SurfaceTexture::updateTexImage() {
LOGV("SurfaceTexture::updateTexImage");
Mutex::Autolock lock(mMutex);
@@ -602,6 +620,7 @@
mCurrentTextureBuf = mSlots[buf].mGraphicBuffer;
mCurrentCrop = mSlots[buf].mCrop;
mCurrentTransform = mSlots[buf].mTransform;
+ mCurrentScalingMode = mSlots[buf].mScalingMode;
mCurrentTimestamp = mSlots[buf].mTimestamp;
computeCurrentTransformMatrix();
@@ -809,6 +828,11 @@
return mCurrentTransform;
}
+uint32_t SurfaceTexture::getCurrentScalingMode() const {
+ Mutex::Autolock lock(mMutex);
+ return mCurrentScalingMode;
+}
+
int SurfaceTexture::query(int what, int* outValue)
{
Mutex::Autolock lock(mMutex);
diff --git a/libs/gui/SurfaceTextureClient.cpp b/libs/gui/SurfaceTextureClient.cpp
index d5b7c89..68475e9 100644
--- a/libs/gui/SurfaceTextureClient.cpp
+++ b/libs/gui/SurfaceTextureClient.cpp
@@ -286,6 +286,9 @@
case NATIVE_WINDOW_UNLOCK_AND_POST:
res = dispatchUnlockAndPost(args);
break;
+ case NATIVE_WINDOW_SET_SCALING_MODE:
+ res = dispatchSetScalingMode(args);
+ break;
default:
res = NAME_NOT_FOUND;
break;
@@ -340,6 +343,11 @@
return setBuffersFormat(f);
}
+int SurfaceTextureClient::dispatchSetScalingMode(va_list args) {
+ int m = va_arg(args, int);
+ return setScalingMode(m);
+}
+
int SurfaceTextureClient::dispatchSetBuffersTransform(va_list args) {
int transform = va_arg(args, int);
return setBuffersTransform(transform);
@@ -456,6 +464,18 @@
return NO_ERROR;
}
+int SurfaceTextureClient::setScalingMode(int mode)
+{
+ LOGV("SurfaceTextureClient::setScalingMode(%d)", mode);
+ Mutex::Autolock lock(mMutex);
+ // mode is validated on the server
+ status_t err = mSurfaceTexture->setScalingMode(mode);
+ LOGE_IF(err, "ISurfaceTexture::setScalingMode(%d) returned %s",
+ mode, strerror(-err));
+
+ return err;
+}
+
int SurfaceTextureClient::setBuffersTransform(int transform)
{
LOGV("SurfaceTextureClient::setBuffersTransform");
diff --git a/libs/ui/FramebufferNativeWindow.cpp b/libs/ui/FramebufferNativeWindow.cpp
index 794747d..e2772a7 100644
--- a/libs/ui/FramebufferNativeWindow.cpp
+++ b/libs/ui/FramebufferNativeWindow.cpp
@@ -299,18 +299,38 @@
{
switch (operation) {
case NATIVE_WINDOW_SET_USAGE:
- case NATIVE_WINDOW_SET_BUFFERS_FORMAT:
+ // TODO: we should implement this
+ return NO_ERROR;
case NATIVE_WINDOW_CONNECT:
+ // TODO: we should implement this
+ return NO_ERROR;
case NATIVE_WINDOW_DISCONNECT:
- break;
+ // TODO: we should implement this
+ return NO_ERROR;
case NATIVE_WINDOW_LOCK:
return INVALID_OPERATION;
case NATIVE_WINDOW_UNLOCK_AND_POST:
return INVALID_OPERATION;
- default:
- return NAME_NOT_FOUND;
+ case NATIVE_WINDOW_SET_CROP:
+ return INVALID_OPERATION;
+ case NATIVE_WINDOW_SET_BUFFER_COUNT:
+ // TODO: we should implement this
+ return INVALID_OPERATION;
+ case NATIVE_WINDOW_SET_BUFFERS_GEOMETRY:
+ return INVALID_OPERATION;
+ case NATIVE_WINDOW_SET_BUFFERS_TRANSFORM:
+ return INVALID_OPERATION;
+ case NATIVE_WINDOW_SET_BUFFERS_TIMESTAMP:
+ return INVALID_OPERATION;
+ case NATIVE_WINDOW_SET_BUFFERS_DIMENSIONS:
+ return INVALID_OPERATION;
+ case NATIVE_WINDOW_SET_BUFFERS_FORMAT:
+ // TODO: we should implement this
+ return NO_ERROR;
+ case NATIVE_WINDOW_SET_SCALING_MODE:
+ return INVALID_OPERATION;
}
- return NO_ERROR;
+ return NAME_NOT_FOUND;
}
// ----------------------------------------------------------------------------
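With the default fall-through now returning NAME_NOT_FOUND, a caller can tell an unknown opcode apart from one the framebuffer window understands but rejects. A hedged caller-side sketch (fbWindow is a hypothetical FramebufferNativeWindow pointer):

    // The framebuffer path reports INVALID_OPERATION for scaling-mode
    // changes, so treat that as "keep the default" rather than a hard error.
    int err = native_window_set_scaling_mode(fbWindow,
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
    if (err == INVALID_OPERATION) {
        // Operation recognized but not supported on the framebuffer window.
    } else if (err == NAME_NOT_FOUND) {
        // Window implementation that predates this opcode entirely.
    }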
diff --git a/media/libstagefright/ACodec.cpp b/media/libstagefright/ACodec.cpp
index d4d07b2..174ec92 100644
--- a/media/libstagefright/ACodec.cpp
+++ b/media/libstagefright/ACodec.cpp
@@ -445,6 +445,13 @@
return err;
}
+ err = native_window_set_scaling_mode(mNativeWindow.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+
+ if (err != OK) {
+ return err;
+ }
+
err = native_window_set_buffers_geometry(
mNativeWindow.get(),
def.format.video.nFrameWidth,
diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
index 4f8336e..7bcbdcf 100755
--- a/media/libstagefright/OMXCodec.cpp
+++ b/media/libstagefright/OMXCodec.cpp
@@ -1765,6 +1765,13 @@
return err;
}
+ err = native_window_set_scaling_mode(mNativeWindow.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
+
+ if (err != OK) {
+ return err;
+ }
+
err = native_window_set_buffers_geometry(
mNativeWindow.get(),
def.format.video.nFrameWidth,
diff --git a/media/libstagefright/codecs/avc/dec/Android.mk b/media/libstagefright/codecs/avc/dec/Android.mk
deleted file mode 100644
index 2949a04..0000000
--- a/media/libstagefright/codecs/avc/dec/Android.mk
+++ /dev/null
@@ -1,55 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
- src/avcdec_api.cpp \
- src/avc_bitstream.cpp \
- src/header.cpp \
- src/itrans.cpp \
- src/pred_inter.cpp \
- src/pred_intra.cpp \
- src/residual.cpp \
- src/slice.cpp \
- src/vlc.cpp
-
-LOCAL_MODULE := libstagefright_avcdec
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/src \
- $(LOCAL_PATH)/include \
- $(LOCAL_PATH)/../common/include \
- $(TOP)/frameworks/base/media/libstagefright/include \
- frameworks/base/include/media/stagefright/openmax \
-
-LOCAL_CFLAGS := -DOSCL_IMPORT_REF= -DOSCL_UNUSED_ARG= -DOSCL_EXPORT_REF=
-
-include $(BUILD_STATIC_LIBRARY)
-
-################################################################################
-
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := \
- SoftAVC.cpp
-
-LOCAL_C_INCLUDES := \
- $(LOCAL_PATH)/src \
- $(LOCAL_PATH)/include \
- $(LOCAL_PATH)/../common/include \
- frameworks/base/media/libstagefright/include \
- frameworks/base/include/media/stagefright/openmax \
-
-LOCAL_CFLAGS := -DOSCL_IMPORT_REF=
-
-LOCAL_STATIC_LIBRARIES := \
- libstagefright_avcdec
-
-LOCAL_SHARED_LIBRARIES := \
- libstagefright_avc_common \
- libstagefright libstagefright_omx libstagefright_foundation libutils
-
-LOCAL_MODULE := libstagefright_soft_avcdec
-LOCAL_MODULE_TAGS := optional
-
-include $(BUILD_SHARED_LIBRARY)
-
diff --git a/media/libstagefright/codecs/avc/dec/SoftAVC.cpp b/media/libstagefright/codecs/avc/dec/SoftAVC.cpp
deleted file mode 100644
index 6a476f6..0000000
--- a/media/libstagefright/codecs/avc/dec/SoftAVC.cpp
+++ /dev/null
@@ -1,720 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-//#define LOG_NDEBUG 0
-#define LOG_TAG "SoftAVC"
-#include <utils/Log.h>
-
-#include "SoftAVC.h"
-
-#include <media/stagefright/foundation/ADebug.h>
-#include <media/stagefright/MediaDefs.h>
-#include <media/stagefright/MediaErrors.h>
-#include <media/IOMX.h>
-
-#include "avcdec_api.h"
-#include "avcdec_int.h"
-
-namespace android {
-
-static const char kStartCode[4] = { 0x00, 0x00, 0x00, 0x01 };
-
-static const CodecProfileLevel kProfileLevels[] = {
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel1b },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel11 },
- { OMX_VIDEO_AVCProfileBaseline, OMX_VIDEO_AVCLevel12 },
-};
-
-template<class T>
-static void InitOMXParams(T *params) {
- params->nSize = sizeof(T);
- params->nVersion.s.nVersionMajor = 1;
- params->nVersion.s.nVersionMinor = 0;
- params->nVersion.s.nRevision = 0;
- params->nVersion.s.nStep = 0;
-}
-
-static int32_t Malloc(void *userData, int32_t size, int32_t attrs) {
- return reinterpret_cast<int32_t>(malloc(size));
-}
-
-static void Free(void *userData, int32_t ptr) {
- free(reinterpret_cast<void *>(ptr));
-}
-
-SoftAVC::SoftAVC(
- const char *name,
- const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData,
- OMX_COMPONENTTYPE **component)
- : SimpleSoftOMXComponent(name, callbacks, appData, component),
- mHandle(new tagAVCHandle),
- mInputBufferCount(0),
- mWidth(160),
- mHeight(120),
- mCropLeft(0),
- mCropTop(0),
- mCropRight(mWidth - 1),
- mCropBottom(mHeight - 1),
- mSPSSeen(false),
- mPPSSeen(false),
- mCurrentTimeUs(-1),
- mEOSStatus(INPUT_DATA_AVAILABLE),
- mOutputPortSettingsChange(NONE) {
- initPorts();
- CHECK_EQ(initDecoder(), (status_t)OK);
-}
-
-SoftAVC::~SoftAVC() {
- PVAVCCleanUpDecoder(mHandle);
-
- delete mHandle;
- mHandle = NULL;
-}
-
-void SoftAVC::initPorts() {
- OMX_PARAM_PORTDEFINITIONTYPE def;
- InitOMXParams(&def);
-
- def.nPortIndex = 0;
- def.eDir = OMX_DirInput;
- def.nBufferCountMin = kNumInputBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.nBufferSize = 8192;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 1;
-
- def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_AVC);
- def.format.video.pNativeRender = NULL;
- def.format.video.nFrameWidth = mWidth;
- def.format.video.nFrameHeight = mHeight;
- def.format.video.nStride = def.format.video.nFrameWidth;
- def.format.video.nSliceHeight = def.format.video.nFrameHeight;
- def.format.video.nBitrate = 0;
- def.format.video.xFramerate = 0;
- def.format.video.bFlagErrorConcealment = OMX_FALSE;
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
- def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
- def.format.video.pNativeWindow = NULL;
-
- addPort(def);
-
- def.nPortIndex = 1;
- def.eDir = OMX_DirOutput;
- def.nBufferCountMin = kNumOutputBuffers;
- def.nBufferCountActual = def.nBufferCountMin;
- def.bEnabled = OMX_TRUE;
- def.bPopulated = OMX_FALSE;
- def.eDomain = OMX_PortDomainVideo;
- def.bBuffersContiguous = OMX_FALSE;
- def.nBufferAlignment = 2;
-
- def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
- def.format.video.pNativeRender = NULL;
- def.format.video.nFrameWidth = mWidth;
- def.format.video.nFrameHeight = mHeight;
- def.format.video.nStride = def.format.video.nFrameWidth;
- def.format.video.nSliceHeight = def.format.video.nFrameHeight;
- def.format.video.nBitrate = 0;
- def.format.video.xFramerate = 0;
- def.format.video.bFlagErrorConcealment = OMX_FALSE;
- def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
- def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
- def.format.video.pNativeWindow = NULL;
-
- def.nBufferSize =
- (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;
-
- addPort(def);
-}
-
-status_t SoftAVC::initDecoder() {
- memset(mHandle, 0, sizeof(tagAVCHandle));
- mHandle->AVCObject = NULL;
- mHandle->userData = this;
- mHandle->CBAVC_DPBAlloc = ActivateSPSWrapper;
- mHandle->CBAVC_FrameBind = BindFrameWrapper;
- mHandle->CBAVC_FrameUnbind = UnbindFrame;
- mHandle->CBAVC_Malloc = Malloc;
- mHandle->CBAVC_Free = Free;
-
- return OK;
-}
-
-OMX_ERRORTYPE SoftAVC::internalGetParameter(
- OMX_INDEXTYPE index, OMX_PTR params) {
- switch (index) {
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex != 0) {
- return OMX_ErrorNoMore;
- }
-
- if (formatParams->nPortIndex == 0) {
- formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
- formatParams->eColorFormat = OMX_COLOR_FormatUnused;
- formatParams->xFramerate = 0;
- } else {
- CHECK_EQ(formatParams->nPortIndex, 1u);
-
- formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
- formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
- formatParams->xFramerate = 0;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoProfileLevelQuerySupported:
- {
- OMX_VIDEO_PARAM_PROFILELEVELTYPE *profileLevel =
- (OMX_VIDEO_PARAM_PROFILELEVELTYPE *) params;
-
- if (profileLevel->nPortIndex != 0) { // Input port only
- LOGE("Invalid port index: %ld", profileLevel->nPortIndex);
- return OMX_ErrorUnsupportedIndex;
- }
-
- size_t index = profileLevel->nProfileIndex;
- size_t nProfileLevels =
- sizeof(kProfileLevels) / sizeof(kProfileLevels[0]);
- if (index >= nProfileLevels) {
- return OMX_ErrorNoMore;
- }
-
- profileLevel->eProfile = kProfileLevels[index].mProfile;
- profileLevel->eLevel = kProfileLevels[index].mLevel;
- return OMX_ErrorNone;
- }
-
- default:
- return SimpleSoftOMXComponent::internalGetParameter(index, params);
- }
-}
-
-OMX_ERRORTYPE SoftAVC::internalSetParameter(
- OMX_INDEXTYPE index, const OMX_PTR params) {
- switch (index) {
- case OMX_IndexParamStandardComponentRole:
- {
- const OMX_PARAM_COMPONENTROLETYPE *roleParams =
- (const OMX_PARAM_COMPONENTROLETYPE *)params;
-
- if (strncmp((const char *)roleParams->cRole,
- "video_decoder.avc",
- OMX_MAX_STRINGNAME_SIZE - 1)) {
- return OMX_ErrorUndefined;
- }
-
- return OMX_ErrorNone;
- }
-
- case OMX_IndexParamVideoPortFormat:
- {
- OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
- (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;
-
- if (formatParams->nPortIndex > 1) {
- return OMX_ErrorUndefined;
- }
-
- if (formatParams->nIndex != 0) {
- return OMX_ErrorNoMore;
- }
-
- return OMX_ErrorNone;
- }
-
- default:
- return SimpleSoftOMXComponent::internalSetParameter(index, params);
- }
-}
-
-OMX_ERRORTYPE SoftAVC::getConfig(
- OMX_INDEXTYPE index, OMX_PTR params) {
- switch (index) {
- case OMX_IndexConfigCommonOutputCrop:
- {
- OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;
-
- if (rectParams->nPortIndex != 1) {
- return OMX_ErrorUndefined;
- }
-
- rectParams->nLeft = mCropLeft;
- rectParams->nTop = mCropTop;
- rectParams->nWidth = mCropRight - mCropLeft + 1;
- rectParams->nHeight = mCropBottom - mCropTop + 1;
-
- return OMX_ErrorNone;
- }
-
- default:
- return OMX_ErrorUnsupportedIndex;
- }
-}
-
-static void findNALFragment(
- const OMX_BUFFERHEADERTYPE *inHeader,
- const uint8_t **fragPtr, size_t *fragSize) {
- const uint8_t *data = inHeader->pBuffer + inHeader->nOffset;
-
- size_t size = inHeader->nFilledLen;
-
- CHECK(size >= 4);
- CHECK(!memcmp(kStartCode, data, 4));
-
- size_t offset = 4;
- while (offset + 3 < size && memcmp(kStartCode, &data[offset], 4)) {
- ++offset;
- }
-
- *fragPtr = &data[4];
- if (offset + 3 >= size) {
- *fragSize = size - 4;
- } else {
- *fragSize = offset - 4;
- }
-}
-
-void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
- if (mOutputPortSettingsChange != NONE) {
- return;
- }
-
- List<BufferInfo *> &inQueue = getPortQueue(0);
- List<BufferInfo *> &outQueue = getPortQueue(1);
-
- if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
- return;
- }
-
- while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
- && outQueue.size() == kNumOutputBuffers) {
- if (mEOSStatus == INPUT_EOS_SEEN) {
- OMX_BUFFERHEADERTYPE *outHeader;
- if (drainOutputBuffer(&outHeader)) {
- List<BufferInfo *>::iterator it = outQueue.begin();
- while ((*it)->mHeader != outHeader) {
- ++it;
- }
-
- BufferInfo *outInfo = *it;
- outInfo->mOwnedByUs = false;
- outQueue.erase(it);
- outInfo = NULL;
-
- notifyFillBufferDone(outHeader);
- outHeader = NULL;
- return;
- }
-
- BufferInfo *outInfo = *outQueue.begin();
- outHeader = outInfo->mHeader;
-
- outHeader->nOffset = 0;
- outHeader->nFilledLen = 0;
- outHeader->nFlags = OMX_BUFFERFLAG_EOS;
- outHeader->nTimeStamp = 0;
-
- outQueue.erase(outQueue.begin());
- outInfo->mOwnedByUs = false;
- notifyFillBufferDone(outHeader);
-
- mEOSStatus = OUTPUT_FRAMES_FLUSHED;
- return;
- }
-
- BufferInfo *inInfo = *inQueue.begin();
- OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
-
- if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
- inQueue.erase(inQueue.begin());
- inInfo->mOwnedByUs = false;
- notifyEmptyBufferDone(inHeader);
-
- mEOSStatus = INPUT_EOS_SEEN;
- continue;
- }
-
- mCurrentTimeUs = inHeader->nTimeStamp;
-
- const uint8_t *fragPtr;
- size_t fragSize;
- findNALFragment(inHeader, &fragPtr, &fragSize);
-
- bool releaseFragment;
- OMX_BUFFERHEADERTYPE *outHeader;
- status_t err = decodeFragment(
- fragPtr, fragSize,
- &releaseFragment, &outHeader);
-
- if (releaseFragment) {
- CHECK_GE(inHeader->nFilledLen, fragSize + 4);
-
- inHeader->nOffset += fragSize + 4;
- inHeader->nFilledLen -= fragSize + 4;
-
- if (inHeader->nFilledLen == 0) {
- inInfo->mOwnedByUs = false;
- inQueue.erase(inQueue.begin());
- inInfo = NULL;
- notifyEmptyBufferDone(inHeader);
- inHeader = NULL;
- }
- }
-
- if (outHeader != NULL) {
- List<BufferInfo *>::iterator it = outQueue.begin();
- while ((*it)->mHeader != outHeader) {
- ++it;
- }
-
- BufferInfo *outInfo = *it;
- outInfo->mOwnedByUs = false;
- outQueue.erase(it);
- outInfo = NULL;
-
- notifyFillBufferDone(outHeader);
- outHeader = NULL;
- return;
- }
-
- if (err == INFO_FORMAT_CHANGED) {
- return;
- }
-
- if (err != OK) {
- notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
- return;
- }
- }
-}
-
-status_t SoftAVC::decodeFragment(
- const uint8_t *fragPtr, size_t fragSize,
- bool *releaseFragment,
- OMX_BUFFERHEADERTYPE **outHeader) {
- *releaseFragment = true;
- *outHeader = NULL;
-
- int nalType;
- int nalRefIdc;
- AVCDec_Status res =
- PVAVCDecGetNALType(
- const_cast<uint8_t *>(fragPtr), fragSize,
- &nalType, &nalRefIdc);
-
- if (res != AVCDEC_SUCCESS) {
- LOGV("cannot determine nal type");
- return ERROR_MALFORMED;
- }
-
- if (nalType != AVC_NALTYPE_SPS && nalType != AVC_NALTYPE_PPS
- && (!mSPSSeen || !mPPSSeen)) {
- // We haven't seen SPS or PPS yet.
- return OK;
- }
-
- switch (nalType) {
- case AVC_NALTYPE_SPS:
- {
- mSPSSeen = true;
-
- res = PVAVCDecSeqParamSet(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res != AVCDEC_SUCCESS) {
- return ERROR_MALFORMED;
- }
-
- AVCDecObject *pDecVid = (AVCDecObject *)mHandle->AVCObject;
-
- int32_t width =
- (pDecVid->seqParams[0]->pic_width_in_mbs_minus1 + 1) * 16;
-
- int32_t height =
- (pDecVid->seqParams[0]->pic_height_in_map_units_minus1 + 1) * 16;
-
- int32_t crop_left, crop_right, crop_top, crop_bottom;
- if (pDecVid->seqParams[0]->frame_cropping_flag)
- {
- crop_left = 2 * pDecVid->seqParams[0]->frame_crop_left_offset;
- crop_right =
- width - (2 * pDecVid->seqParams[0]->frame_crop_right_offset + 1);
-
- if (pDecVid->seqParams[0]->frame_mbs_only_flag)
- {
- crop_top = 2 * pDecVid->seqParams[0]->frame_crop_top_offset;
- crop_bottom =
- height -
- (2 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
- }
- else
- {
- crop_top = 4 * pDecVid->seqParams[0]->frame_crop_top_offset;
- crop_bottom =
- height -
- (4 * pDecVid->seqParams[0]->frame_crop_bottom_offset + 1);
- }
- } else {
- crop_bottom = height - 1;
- crop_right = width - 1;
- crop_top = crop_left = 0;
- }
-
- status_t err = OK;
-
- if (mWidth != width || mHeight != height) {
- mWidth = width;
- mHeight = height;
-
- updatePortDefinitions();
-
- notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
- mOutputPortSettingsChange = AWAITING_DISABLED;
-
- err = INFO_FORMAT_CHANGED;
- }
-
- if (mCropLeft != crop_left
- || mCropTop != crop_top
- || mCropRight != crop_right
- || mCropBottom != crop_bottom) {
- mCropLeft = crop_left;
- mCropTop = crop_top;
- mCropRight = crop_right;
- mCropBottom = crop_bottom;
-
- notify(OMX_EventPortSettingsChanged,
- 1,
- OMX_IndexConfigCommonOutputCrop,
- NULL);
- }
-
- return err;
- }
-
- case AVC_NALTYPE_PPS:
- {
- mPPSSeen = true;
-
- res = PVAVCDecPicParamSet(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res != AVCDEC_SUCCESS) {
- LOGV("PVAVCDecPicParamSet returned error %d", res);
- return ERROR_MALFORMED;
- }
-
- return OK;
- }
-
- case AVC_NALTYPE_SLICE:
- case AVC_NALTYPE_IDR:
- {
- res = PVAVCDecodeSlice(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res == AVCDEC_PICTURE_OUTPUT_READY) {
- *releaseFragment = false;
-
- if (!drainOutputBuffer(outHeader)) {
- return UNKNOWN_ERROR;
- }
-
- return OK;
- }
-
- if (res == AVCDEC_PICTURE_READY || res == AVCDEC_SUCCESS) {
- return OK;
- } else {
- LOGV("PVAVCDecodeSlice returned error %d", res);
- return ERROR_MALFORMED;
- }
- }
-
- case AVC_NALTYPE_SEI:
- {
- res = PVAVCDecSEI(
- mHandle, const_cast<uint8_t *>(fragPtr),
- fragSize);
-
- if (res != AVCDEC_SUCCESS) {
- return ERROR_MALFORMED;
- }
-
- return OK;
- }
-
- case AVC_NALTYPE_AUD:
- case AVC_NALTYPE_FILL:
- case AVC_NALTYPE_EOSEQ:
- {
- return OK;
- }
-
- default:
- {
- LOGE("Should not be here, unknown nalType %d", nalType);
-
- return ERROR_MALFORMED;
- }
- }
-
- return OK;
-}
-
-bool SoftAVC::drainOutputBuffer(OMX_BUFFERHEADERTYPE **outHeader) {
- int32_t index;
- int32_t Release;
- AVCFrameIO Output;
- Output.YCbCr[0] = Output.YCbCr[1] = Output.YCbCr[2] = NULL;
- AVCDec_Status status =
- PVAVCDecGetOutput(mHandle, &index, &Release, &Output);
-
- if (status != AVCDEC_SUCCESS) {
- return false;
- }
-
- PortInfo *port = editPortInfo(1);
- CHECK_GE(index, 0);
- CHECK_LT((size_t)index, port->mBuffers.size());
- CHECK(port->mBuffers.editItemAt(index).mOwnedByUs);
-
- *outHeader = port->mBuffers.editItemAt(index).mHeader;
- (*outHeader)->nOffset = 0;
- (*outHeader)->nFilledLen = port->mDef.nBufferSize;
- (*outHeader)->nFlags = 0;
-
- return true;
-}
-
-void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) {
- if (portIndex == 0) {
- PVAVCDecReset(mHandle);
-
- mEOSStatus = INPUT_DATA_AVAILABLE;
- }
-}
-
-void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) {
- if (portIndex != 1) {
- return;
- }
-
- switch (mOutputPortSettingsChange) {
- case NONE:
- break;
-
- case AWAITING_DISABLED:
- {
- CHECK(!enabled);
- mOutputPortSettingsChange = AWAITING_ENABLED;
- break;
- }
-
- default:
- {
- CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED);
- CHECK(enabled);
- mOutputPortSettingsChange = NONE;
- break;
- }
- }
-}
-
-void SoftAVC::updatePortDefinitions() {
- OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def = &editPortInfo(1)->mDef;
- def->format.video.nFrameWidth = mWidth;
- def->format.video.nFrameHeight = mHeight;
- def->format.video.nStride = def->format.video.nFrameWidth;
- def->format.video.nSliceHeight = def->format.video.nFrameHeight;
-
- def->nBufferSize =
- (def->format.video.nFrameWidth
- * def->format.video.nFrameHeight * 3) / 2;
-}
-
-// static
-int32_t SoftAVC::ActivateSPSWrapper(
- void *userData, unsigned int sizeInMbs, unsigned int numBuffers) {
- return static_cast<SoftAVC *>(userData)->activateSPS(sizeInMbs, numBuffers);
-}
-
-// static
-int32_t SoftAVC::BindFrameWrapper(
- void *userData, int32_t index, uint8_t **yuv) {
- return static_cast<SoftAVC *>(userData)->bindFrame(index, yuv);
-}
-
-// static
-void SoftAVC::UnbindFrame(void *userData, int32_t index) {
-}
-
-int32_t SoftAVC::activateSPS(
- unsigned int sizeInMbs, unsigned int numBuffers) {
- PortInfo *port = editPortInfo(1);
- CHECK_GE(port->mBuffers.size(), numBuffers);
- CHECK_GE(port->mDef.nBufferSize, (sizeInMbs << 7) * 3);
-
- return 1;
-}
-
-int32_t SoftAVC::bindFrame(int32_t index, uint8_t **yuv) {
- PortInfo *port = editPortInfo(1);
-
- CHECK_GE(index, 0);
- CHECK_LT((size_t)index, port->mBuffers.size());
-
- BufferInfo *outBuffer =
- &port->mBuffers.editItemAt(index);
-
- CHECK(outBuffer->mOwnedByUs);
-
- outBuffer->mHeader->nTimeStamp = mCurrentTimeUs;
- *yuv = outBuffer->mHeader->pBuffer;
-
- return 1;
-}
-
-} // namespace android
-
-android::SoftOMXComponent *createSoftOMXComponent(
- const char *name, const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData, OMX_COMPONENTTYPE **component) {
- return new android::SoftAVC(name, callbacks, appData, component);
-}
diff --git a/media/libstagefright/codecs/avc/dec/SoftAVC.h b/media/libstagefright/codecs/avc/dec/SoftAVC.h
deleted file mode 100644
index 1594b4d..0000000
--- a/media/libstagefright/codecs/avc/dec/SoftAVC.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Copyright (C) 2011 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#ifndef SOFT_AVC_H_
-
-#define SOFT_AVC_H_
-
-#include "SimpleSoftOMXComponent.h"
-
-struct tagAVCHandle;
-
-namespace android {
-
-struct SoftAVC : public SimpleSoftOMXComponent {
- SoftAVC(const char *name,
- const OMX_CALLBACKTYPE *callbacks,
- OMX_PTR appData,
- OMX_COMPONENTTYPE **component);
-
-protected:
- virtual ~SoftAVC();
-
- virtual OMX_ERRORTYPE internalGetParameter(
- OMX_INDEXTYPE index, OMX_PTR params);
-
- virtual OMX_ERRORTYPE internalSetParameter(
- OMX_INDEXTYPE index, const OMX_PTR params);
-
- virtual OMX_ERRORTYPE getConfig(OMX_INDEXTYPE index, OMX_PTR params);
-
- virtual void onQueueFilled(OMX_U32 portIndex);
- virtual void onPortFlushCompleted(OMX_U32 portIndex);
- virtual void onPortEnableCompleted(OMX_U32 portIndex, bool enabled);
-
-private:
- enum {
- kNumInputBuffers = 4,
- kNumOutputBuffers = 18,
- };
-
- enum EOSStatus {
- INPUT_DATA_AVAILABLE,
- INPUT_EOS_SEEN,
- OUTPUT_FRAMES_FLUSHED,
- };
-
- tagAVCHandle *mHandle;
-
- size_t mInputBufferCount;
-
- int32_t mWidth, mHeight;
- int32_t mCropLeft, mCropTop, mCropRight, mCropBottom;
-
- bool mSPSSeen, mPPSSeen;
-
- int64_t mCurrentTimeUs;
-
- EOSStatus mEOSStatus;
-
- enum {
- NONE,
- AWAITING_DISABLED,
- AWAITING_ENABLED
- } mOutputPortSettingsChange;
-
- void initPorts();
- status_t initDecoder();
-
- status_t decodeFragment(
- const uint8_t *fragPtr, size_t fragSize,
- bool *releaseFrames,
- OMX_BUFFERHEADERTYPE **outHeader);
-
- void updatePortDefinitions();
- bool drainOutputBuffer(OMX_BUFFERHEADERTYPE **outHeader);
-
- static int32_t ActivateSPSWrapper(
- void *userData, unsigned int sizeInMbs, unsigned int numBuffers);
-
- static int32_t BindFrameWrapper(
- void *userData, int32_t index, uint8_t **yuv);
-
- static void UnbindFrame(void *userData, int32_t index);
-
- int32_t activateSPS(
- unsigned int sizeInMbs, unsigned int numBuffers);
-
- int32_t bindFrame(int32_t index, uint8_t **yuv);
-
- DISALLOW_EVIL_CONSTRUCTORS(SoftAVC);
-};
-
-} // namespace android
-
-#endif // SOFT_AVC_H_
-
diff --git a/media/libstagefright/codecs/avc/dec/include/avcdec_api.h b/media/libstagefright/codecs/avc/dec/include/avcdec_api.h
deleted file mode 100644
index f6a14b7..0000000
--- a/media/libstagefright/codecs/avc/dec/include/avcdec_api.h
+++ /dev/null
@@ -1,200 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains application function interfaces to the AVC decoder library
-and necessary type defitionitions and enumerations.
-@publishedAll
-*/
-
-#ifndef _AVCDEC_API_H_
-#define _AVCDEC_API_H_
-
-#include "avcapi_common.h"
-
-/**
- This enumeration is used for the status returned from the library interface.
-*/
-typedef enum
-{
- /**
- The followings are fail with details. Their values are negative.
- */
- AVCDEC_NO_DATA = -4,
- AVCDEC_PACKET_LOSS = -3,
- /**
- Fail information
- */
- AVCDEC_NO_BUFFER = -2, /* no output picture buffer available */
- AVCDEC_MEMORY_FAIL = -1, /* memory allocation failed */
- AVCDEC_FAIL = 0,
- /**
- Generic success value
- */
- AVCDEC_SUCCESS = 1,
- AVCDEC_PICTURE_OUTPUT_READY = 2,
- AVCDEC_PICTURE_READY = 3,
-
- /**
- The followings are success with warnings. Their values are positive integers.
- */
- AVCDEC_NO_NEXT_SC = 4,
- AVCDEC_REDUNDANT_FRAME = 5,
- AVCDEC_CONCEALED_FRAME = 6 /* detect and conceal the error */
-} AVCDec_Status;
-
-
-/**
-This structure contains sequence parameters information.
-*/
-typedef struct tagAVCDecSPSInfo
-{
- int FrameWidth;
- int FrameHeight;
- uint frame_only_flag;
- int frame_crop_left;
- int frame_crop_right;
- int frame_crop_top;
- int frame_crop_bottom;
-
-} AVCDecSPSInfo;
-
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
- /** THE FOLLOWINGS ARE APIS */
- /**
- This function parses one NAL unit from byte stream format input according to Annex B.
- \param "bitstream" "Pointer to the bitstream buffer."
- \param "nal_unit" "Point to pointer and the location of the start of the first NAL unit
- found in bitstream."
- \param "size" "As input, the pointer to the size of bitstream in bytes. As output,
- the value is changed to be the size of the found NAL unit."
- \return "AVCDEC_SUCCESS if success, AVCDEC_FAIL if no first start code is found, AVCDEC_NO_NEX_SC if
- the first start code is found, but the second start code is missing (potential partial NAL)."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCAnnexBGetNALUnit(uint8 *bitstream, uint8 **nal_unit, int *size);
-
- /**
- This function sniffs the nal_unit_type such that users can call corresponding APIs.
- \param "bitstream" "Pointer to the beginning of a NAL unit (start with forbidden_zero_bit, etc.)."
- \param "size" "size of the bitstream (NumBytesInNALunit + 1)."
- \param "nal_unit_type" "Pointer to the return value of nal unit type."
- \return "AVCDEC_SUCCESS if success, AVCDEC_FAIL otherwise."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetNALType(uint8 *bitstream, int size, int *nal_type, int *nal_ref_idc);
-
- /**
- This function decodes the sequence parameters set, initializes related parameters and
- allocates memory (reference frames list), must also be compliant with Annex A.
- It is equivalent to decode VOL header of MPEG4.
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "nal_unit" "Pointer to the buffer containing single NAL unit.
- The content will change due to EBSP-to-RBSP conversion."
- \param "nal_size" "size of the bitstream NumBytesInNALunit."
- \return "AVCDEC_SUCCESS if success,
- AVCDEC_FAIL if profile and level is not supported,
- AVCDEC_MEMORY_FAIL if memory allocations return null."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecSeqParamSet(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
-
- /**
- This function returns sequence parameters such as dimension and field flag of the most recently
- decoded SPS. More can be added later or grouped together into a structure. This API can be called
- after PVAVCInitSequence. If no sequence parameter has been decoded yet, it will return AVCDEC_FAIL.
-
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "seqInfo" "Pointer to the AVCDecSeqParamInfo structure."
- \return "AVCDEC_SUCCESS if success and AVCDEC_FAIL if fail."
- \note "This API can be combined with PVAVCInitSequence if wanted to be consistent with m4vdec lib."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetSeqInfo(AVCHandle *avcHandle, AVCDecSPSInfo *seqInfo);
-
- /**
- This function decodes the picture parameters set and initializes related parameters. Note thate
- the PPS may not be present for every picture.
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "nal_unit" "Pointer to the buffer containing single NAL unit.
- The content will change due to EBSP-to-RBSP conversion."
- \param "nal_size" "size of the bitstream NumBytesInNALunit."
- \return "AVCDEC_SUCCESS if success, AVCDEC_FAIL if profile and level is not supported."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecPicParamSet(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
-
- /**
- This function decodes one NAL unit of bitstream. The type of nal unit is one of the
- followings, 1, 5. (for now, no data partitioning, type 2,3,4).
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "nal_unit" "Pointer to the buffer containing a single or partial NAL unit.
- The content will change due to EBSP-to-RBSP conversion."
- \param "buf_size" "Size of the buffer (less than or equal nal_size)."
- \param "nal_size" "size of the current NAL unit NumBytesInNALunit."
- \return "AVCDEC_PICTURE_READY for success and an output is ready,
- AVCDEC_SUCCESS for success but no output is ready,
- AVCDEC_PACKET_LOSS is GetData returns AVCDEC_PACKET_LOSS,
- AVCDEC_FAIL if syntax error is detected,
- AVCDEC_MEMORY_FAIL if memory is corrupted.
- AVCDEC_NO_PICTURE if no frame memory to write to (users need to get output and/or return picture).
- AVCDEC_REDUNDANT_PICTURE if error has been detected in the primary picture and redundant picture is available,
- AVCDEC_CONCEALED_PICTURE if error has been detected and decoder has concealed it."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecSEI(AVCHandle *avcHandle, uint8 *nal_unit, int nal_size);
-
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecodeSlice(AVCHandle *avcHandle, uint8 *buffer, int buf_size);
-
- /**
- Check the availability of the decoded picture in decoding order (frame_num).
- The AVCFrameIO also provide displaying order information such that the application
- can re-order the frame for display. A picture can be retrieved only once.
- \param "avcHandle" "Handle to the AVC decoder library object."
- \param "output" "Pointer to the AVCOutput structure. Note that decoder library will
- not re-used the pixel memory in this structure until it has been returned
- thru PVAVCReleaseOutput API."
- \return "AVCDEC_SUCCESS for success, AVCDEC_FAIL if no picture is available to be displayed,
- AVCDEC_PICTURE_READY if there is another picture to be displayed."
- */
- OSCL_IMPORT_REF AVCDec_Status PVAVCDecGetOutput(AVCHandle *avcHandle, int *indx, int *release_flag, AVCFrameIO *output);
-
- /**
- This function resets the decoder and expects to see the next IDR slice.
- \param "avcHandle" "Handle to the AVC decoder library object."
- */
- OSCL_IMPORT_REF void PVAVCDecReset(AVCHandle *avcHandle);
-
- /**
- This function performs clean up operation including memory deallocation.
- \param "avcHandle" "Handle to the AVC decoder library object."
- */
- OSCL_IMPORT_REF void PVAVCCleanUpDecoder(AVCHandle *avcHandle);
-//AVCDec_Status EBSPtoRBSP(uint8 *nal_unit,int *size);
-
-
-
- /** CALLBACK FUNCTION TO BE IMPLEMENTED BY APPLICATION */
- /** In AVCHandle structure, userData is a pointer to an object with the following
- member functions.
- */
- AVCDec_Status CBAVCDec_GetData(uint32 *userData, unsigned char **buffer, unsigned int *size);
-
-#ifdef __cplusplus
-}
-#endif
-
-#endif /* _AVCDEC_API_H_ */
-
diff --git a/media/libstagefright/codecs/avc/dec/include/pvavcdecoder.h b/media/libstagefright/codecs/avc/dec/include/pvavcdecoder.h
deleted file mode 100644
index 6b196de..0000000
--- a/media/libstagefright/codecs/avc/dec/include/pvavcdecoder.h
+++ /dev/null
@@ -1,49 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#ifndef PVAVCDECODER_H_INCLUDED
-#define PVAVCDECODER_H_INCLUDED
-
-#ifndef PVAVCDECODERINTERFACE_H_INCLUDED
-#include "pvavcdecoderinterface.h"
-#endif
-
-// AVC video decoder
-class PVAVCDecoder : public PVAVCDecoderInterface
-{
- public:
- virtual ~PVAVCDecoder();
- static PVAVCDecoder* New(void);
- virtual bool InitAVCDecoder(FunctionType_SPS, FunctionType_Alloc, FunctionType_Unbind,
- FunctionType_Malloc, FunctionType_Free, void *);
- virtual void CleanUpAVCDecoder(void);
- virtual void ResetAVCDecoder(void);
- virtual int32 DecodeSPS(uint8 *bitstream, int32 buffer_size);
- virtual int32 DecodePPS(uint8 *bitstream, int32 buffer_size);
- virtual int32 DecodeAVCSlice(uint8 *bitstream, int32 *buffer_size);
- virtual bool GetDecOutput(int *indx, int *release);
- virtual void GetVideoDimensions(int32 *width, int32 *height, int32 *top, int32 *left, int32 *bottom, int32 *right);
- int AVC_Malloc(int32 size, int attribute);
- void AVC_Free(int mem);
-
- private:
- PVAVCDecoder();
- bool Construct(void);
- void *iAVCHandle;
-};
-
-#endif
diff --git a/media/libstagefright/codecs/avc/dec/include/pvavcdecoderinterface.h b/media/libstagefright/codecs/avc/dec/include/pvavcdecoderinterface.h
deleted file mode 100644
index 027212d..0000000
--- a/media/libstagefright/codecs/avc/dec/include/pvavcdecoderinterface.h
+++ /dev/null
@@ -1,48 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#ifndef PVAVCDECODERINTERFACE_H_INCLUDED
-#define PVAVCDECODERINTERFACE_H_INCLUDED
-
-typedef void (*FunctionType_Unbind)(void *, int);
-typedef int (*FunctionType_Alloc)(void *, int, uint8 **);
-typedef int (*FunctionType_SPS)(void *, uint, uint);
-typedef int (*FunctionType_Malloc)(void *, int32, int);
-typedef void(*FunctionType_Free)(void *, int);
-
-
-// PVAVCDecoderInterface pure virtual interface class
-class PVAVCDecoderInterface
-{
- public:
- virtual ~PVAVCDecoderInterface() {};
- virtual bool InitAVCDecoder(FunctionType_SPS, FunctionType_Alloc, FunctionType_Unbind,
- FunctionType_Malloc, FunctionType_Free, void *) = 0;
- virtual void CleanUpAVCDecoder(void) = 0;
- virtual void ResetAVCDecoder(void) = 0;
- virtual int32 DecodeSPS(uint8 *bitstream, int32 buffer_size) = 0;
- virtual int32 DecodePPS(uint8 *bitstream, int32 buffer_size) = 0;
- virtual int32 DecodeAVCSlice(uint8 *bitstream, int32 *buffer_size) = 0;
- virtual bool GetDecOutput(int *indx, int *release) = 0;
- virtual void GetVideoDimensions(int32 *width, int32 *height, int32 *top, int32 *left, int32 *bottom, int32 *right) = 0;
-// virtual int AVC_Malloc(int32 size, int attribute);
-// virtual void AVC_Free(int mem);
-};
-
-#endif // PVAVCDECODERINTERFACE_H_INCLUDED
-
-
diff --git a/media/libstagefright/codecs/avc/dec/src/avc_bitstream.cpp b/media/libstagefright/codecs/avc/dec/src/avc_bitstream.cpp
deleted file mode 100644
index 270b664..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avc_bitstream.cpp
+++ /dev/null
@@ -1,276 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_bitstream.h"
-
-/* Swapping may not be needed anymore since we read one byte at a time and perform
-EBSP to RBSP conversion in bitstream. */
-#ifdef LITTLE_ENDIAN
-#if (WORD_SIZE==32) /* this can be replaced with assembly instructions */
-#define SWAP_BYTES(x) ((((x)&0xFF)<<24) | (((x)&0xFF00)<<8) | (((x)&0xFF0000)>>8) | (((x)&0xFF000000)>>24))
-#else /* for 16-bit */
-#define SWAP_BYTES(x) ((((x)&0xFF)<<8) | (((x)&0xFF00)>>8))
-#endif
-#else
-#define SWAP_BYTES(x) (x)
-#endif
-
-
-/* array for trailing bit pattern as function of number of bits */
-/* the first one is unused. */
-const static uint8 trailing_bits[9] = {0, 0x1, 0x2, 0x4, 0x8, 0x10, 0x20, 0x40, 0x80};
-
-/* ======================================================================== */
-/* Function : BitstreamInit() */
-/* Date : 11/4/2003 */
-/* Purpose : Populate bitstream structure with bitstream buffer and size */
-/* it also initializes internal data */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if successed, AVCDEC_FAIL if failed. */
-/* Modified : */
-/* ======================================================================== */
-/* |--------|--------|----~~~~~-----|---------|---------|---------|
- ^ ^read_pos ^data_end_pos
- bitstreamBuffer <--------->
- current_word
-
- |xxxxxxxxxxxxx----| = current_word 32 or 16 bits
- <------------>
- bit_left
- ======================================================================== */
-
-
-/* ======================================================================== */
-/* Function : BitstreamNextWord() */
-/* Date : 12/4/2003 */
-/* Purpose : Read up to machine word. */
-/* In/out : */
-/* Return : Next word with emulation prevention code removed. Everything
- in the bitstream structure got modified except current_word */
-/* Modified : */
-/* ======================================================================== */
-
-AVCDec_Status BitstreamInit(AVCDecBitstream *stream, uint8 *buffer, int size)
-{
- EBSPtoRBSP(buffer, &size);
-
- stream->incnt = 0;
- stream->incnt_next = 0;
- stream->bitcnt = 0;
- stream->curr_word = stream->next_word = 0;
- stream->read_pos = 0;
-
- stream->bitstreamBuffer = buffer;
-
- stream->data_end_pos = size;
-
- stream->nal_size = size;
-
- return AVCDEC_SUCCESS;
-}
-/* ======================================================================== */
-/* Function : AVC_BitstreamFillCache() */
-/* Date : 1/1/2005 */
-/* Purpose : Read up to machine word. */
-/* In/out : */
-/* Return : Read in 4 bytes of input data */
-/* Modified : */
-/* ======================================================================== */
-
-AVCDec_Status AVC_BitstreamFillCache(AVCDecBitstream *stream)
-{
- uint8 *bitstreamBuffer = stream->bitstreamBuffer;
- uint8 *v;
- int num_bits, i;
-
- stream->curr_word |= (stream->next_word >> stream->incnt); // stream->incnt cannot be 32
- stream->next_word <<= (31 - stream->incnt);
- stream->next_word <<= 1;
- num_bits = stream->incnt_next + stream->incnt;
- if (num_bits >= 32)
- {
- stream->incnt_next -= (32 - stream->incnt);
- stream->incnt = 32;
- return AVCDEC_SUCCESS;
- }
- /* this check can be removed if there is additional extra 4 bytes at the end of the bitstream */
- v = bitstreamBuffer + stream->read_pos;
-
- if (stream->read_pos > stream->data_end_pos - 4)
- {
- if (stream->data_end_pos <= stream->read_pos)
- {
- stream->incnt = num_bits;
- stream->incnt_next = 0;
- return AVCDEC_SUCCESS;
- }
-
- stream->next_word = 0;
-
- for (i = 0; i < stream->data_end_pos - stream->read_pos; i++)
- {
- stream->next_word |= (v[i] << ((3 - i) << 3));
- }
-
- stream->read_pos = stream->data_end_pos;
- stream->curr_word |= (stream->next_word >> num_bits); // this is safe
-
- stream->next_word <<= (31 - num_bits);
- stream->next_word <<= 1;
- num_bits = i << 3;
- stream->incnt += stream->incnt_next;
- stream->incnt_next = num_bits - (32 - stream->incnt);
- if (stream->incnt_next < 0)
- {
- stream->incnt += num_bits;
- stream->incnt_next = 0;
- }
- else
- {
- stream->incnt = 32;
- }
- return AVCDEC_SUCCESS;
- }
-
- stream->next_word = ((uint32)v[0] << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
- stream->read_pos += 4;
-
- stream->curr_word |= (stream->next_word >> num_bits); // this is safe
- stream->next_word <<= (31 - num_bits);
- stream->next_word <<= 1;
- stream->incnt_next += stream->incnt;
- stream->incnt = 32;
- return AVCDEC_SUCCESS;
-
-}
-/* ======================================================================== */
-/* Function : BitstreamReadBits() */
-/* Date : 11/4/2003 */
-/* Purpose : Read up to machine word. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if successed, AVCDEC_FAIL if number of bits */
-/* is greater than the word-size, AVCDEC_PACKET_LOSS or */
-/* AVCDEC_NO_DATA if callback to get data fails. */
-/* Modified : */
-/* ======================================================================== */
-AVCDec_Status BitstreamReadBits(AVCDecBitstream *stream, int nBits, uint *code)
-{
- if (stream->incnt < nBits)
- {
- /* frame-based decoding */
- AVC_BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> (32 - nBits);
- BitstreamFlushBits(stream, nBits);
- return AVCDEC_SUCCESS;
-}
-
-
-
-/* ======================================================================== */
-/* Function : BitstreamShowBits() */
-/* Date : 11/4/2003 */
-/* Purpose : Show up to machine word without advancing the pointer. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if successed, AVCDEC_FAIL if number of bits */
-/* is greater than the word-size, AVCDEC_NO_DATA if it needs */
-/* to callback to get data. */
-/* Modified : */
-/* ======================================================================== */
-AVCDec_Status BitstreamShowBits(AVCDecBitstream *stream, int nBits, uint *code)
-{
- if (stream->incnt < nBits)
- {
- /* frame-based decoding */
- AVC_BitstreamFillCache(stream);
- }
-
- *code = stream->curr_word >> (32 - nBits);
-
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : BitstreamRead1Bit() */
-/* Date : 11/4/2003 */
-/* Purpose : Read 1 bit from the bitstream. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if successed, AVCDEC_FAIL if number of bits */
-/* is greater than the word-size, AVCDEC_PACKET_LOSS or */
-/* AVCDEC_NO_DATA if callback to get data fails. */
-/* Modified : */
-/* ======================================================================== */
-
-AVCDec_Status BitstreamRead1Bit(AVCDecBitstream *stream, uint *code)
-{
- if (stream->incnt < 1)
- {
- /* frame-based decoding */
- AVC_BitstreamFillCache(stream);
- }
- *code = stream->curr_word >> 31;
- BitstreamFlushBits(stream, 1);
- return AVCDEC_SUCCESS;
-}
-
-
-
-AVCDec_Status BitstreamByteAlign(AVCDecBitstream *stream)
-{
- uint n_stuffed;
-
- n_stuffed = (8 - (stream->bitcnt & 0x7)) & 0x7; /* 07/05/01 */
-
- stream->bitcnt += n_stuffed;
- stream->incnt -= n_stuffed;
-
- if (stream->incnt < 0)
- {
- stream->bitcnt += stream->incnt;
- stream->incnt = 0;
- }
- stream->curr_word <<= n_stuffed;
- return AVCDEC_SUCCESS;
-}
-
-/* check whether there are more RBSP data. */
-/* ignore the emulation prevention code, assume it has been taken out. */
-bool more_rbsp_data(AVCDecBitstream *stream)
-{
- int total_bit_left;
- uint code;
-
- if (stream->read_pos >= stream->nal_size)
- {
- total_bit_left = stream->incnt_next + stream->incnt;
- if (total_bit_left <= 0)
- {
- return FALSE;
- }
- else if (total_bit_left <= 8)
- {
- BitstreamShowBits(stream, total_bit_left, &code);
- if (code == trailing_bits[total_bit_left])
- {
- return FALSE;
- }
- }
- }
-
- return TRUE;
-}
-
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_api.cpp b/media/libstagefright/codecs/avc/dec/src/avcdec_api.cpp
deleted file mode 100644
index 0a75f17..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_api.cpp
+++ /dev/null
@@ -1,1036 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains application function interfaces to the AVC decoder library.
-@publishedAll
-*/
-
-#include <string.h>
-
-#include "avcdec_api.h"
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-/* ======================================================================== */
-/* Function : EBSPtoRBSP() */
-/* Date : 11/4/2003 */
-/* Purpose : Convert EBSP to RBSP and overwrite it. */
-/* Assuming that forbidden_zero, nal_ref_idc and nal_unit_type */
-/* (first byte), has been taken out of the nal_unit. */
-/* In/out : */
-/* Return : */
-/* Modified : */
-/* ======================================================================== */
-/**
-@pseudocode "
- NumBytesInRBSP = 0;
- for(i=0:i< *size; i++){
- if(i+2 < *size && next_bits(24)==0x000003){
- rbsp_byte[NumBytesInRBSP++];
- rbsp_byte[NumBytesInRBSP++];
- i+=2;
- emulation_prevention_three_byte (0x03)
- }
- else
- rbsp_byte[NumBytesInRBSP++];
- }"
-*/
-AVCDec_Status EBSPtoRBSP(uint8 *nal_unit, int *size)
-{
- int i, j;
- int count = 0;
-
- /* This code is based on EBSPtoRBSP of JM */
- j = 0;
-
- for (i = 0; i < *size; i++)
- {
- if (count == 2 && nal_unit[i] == 0x03)
- {
- i++;
- count = 0;
- }
- nal_unit[j] = nal_unit[i];
- if (nal_unit[i] == 0x00)
- count++;
- else
- count = 0;
- j++;
- }
-
- *size = j;
-
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCAnnexBGetNALUnit() */
-/* Date : 11/3/2003 */
-/* Purpose : Parse a NAL from byte stream format. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeed, AVC_FAIL if fail. */
-/* Modified : */
-/* ======================================================================== */
-/**
-@pseudocode "
- byte_stream_nal_unit(NumBytesInNalunit){
- while(next_bits(24) != 0x000001)
- zero_byte
- if(more_data_in_byte_stream()){
- start_code_prefix_one_3bytes // equal 0x000001
- nal_unit(NumBytesInNALunit)
- }
- }"
-*/
-OSCL_EXPORT_REF AVCDec_Status PVAVCAnnexBGetNALUnit(uint8 *bitstream, uint8 **nal_unit,
- int *size)
-{
- int i, j, FoundStartCode = 0;
- int end;
-
- i = 0;
- while (i < *size && bitstream[i] == 0)
- {
- i++;
- }
- if (i >= *size)
- {
- *nal_unit = bitstream;
- return AVCDEC_FAIL; /* cannot find any start_code_prefix. */
- }
- else if (bitstream[i] != 0x1)
- {
- i = -1; /* start_code_prefix is not at the beginning, continue */
- }
-
- i++;
- *nal_unit = bitstream + i; /* point to the beginning of the NAL unit */
-
- j = end = i;
- while (!FoundStartCode)
- {
- while ((j + 1 < *size) && (bitstream[j] != 0 || bitstream[j+1] != 0)) /* see 2 consecutive zero bytes */
- {
- j++;
- }
- end = j; /* stop and check for start code */
- while (j + 2 < *size && bitstream[j+2] == 0) /* keep reading for zero byte */
- {
- j++;
- }
- if (j + 2 >= *size)
- {
- *size -= i;
- return AVCDEC_NO_NEXT_SC; /* cannot find the second start_code_prefix */
- }
- if (bitstream[j+2] == 0x1)
- {
- FoundStartCode = 1;
- }
- else
- {
- /* could be emulation code 0x3 */
- j += 2; /* continue the search */
- }
- }
-
- *size = end - i;
-
- return AVCDEC_SUCCESS;
-}
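A rough sketch of how a caller might walk an Annex B buffer with this function, assuming the declarations above; the buffer and length names are illustrative and the loop is not part of the original sources:

    uint8 *pos  = annexb_buf;                 /* assumed: start of an Annex B buffer   */
    int    left = annexb_len;                 /* assumed: number of bytes in it        */
    while (left > 0)
    {
        uint8 *nal;
        int    nal_size = left;               /* in: bytes left, out: size of the NAL  */
        AVCDec_Status ret = PVAVCAnnexBGetNALUnit(pos, &nal, &nal_size);
        if (ret == AVCDEC_FAIL)
            break;                            /* no start code found                   */
        /* hand nal / nal_size to PVAVCDecGetNALType() and the decode entry points */
        left -= (int)((nal - pos) + nal_size);
        pos   = nal + nal_size;
        if (ret == AVCDEC_NO_NEXT_SC)
            break;                            /* this was the last NAL in the buffer   */
    }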
-
-/* ======================================================================== */
-/* Function : PVAVCGetNALType() */
-/* Date : 11/4/2003 */
-/* Purpose : Sniff NAL type from the bitstream */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeed, AVC_FAIL if fail. */
-/* Modified : */
-/* ======================================================================== */
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetNALType(uint8 *bitstream, int size,
- int *nal_type, int *nal_ref_idc)
-{
- int forbidden_zero_bit;
- if (size > 0)
- {
- forbidden_zero_bit = bitstream[0] >> 7;
- if (forbidden_zero_bit != 0)
- return AVCDEC_FAIL;
- *nal_ref_idc = (bitstream[0] & 0x60) >> 5;
- *nal_type = bitstream[0] & 0x1F;
- return AVCDEC_SUCCESS;
- }
-
- return AVCDEC_FAIL;
-}
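A worked example of the one-byte NAL header parsed above (the value is hand-picked; 0x65 is a typical first byte of an IDR slice under the standard H.264 NAL type numbering):

    int   nal_type, nal_ref_idc;
    uint8 hdr = 0x65;                         /* 0110 0101b                            */
    if (PVAVCDecGetNALType(&hdr, 1, &nal_type, &nal_ref_idc) == AVCDEC_SUCCESS)
    {
        /* forbidden_zero_bit = 0, nal_ref_idc = 3, nal_type = 5 (IDR slice) */
    }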
-
-/* ======================================================================== */
-/* Function : PVAVCDecSeqParamSet() */
-/* Date : 11/4/2003 */
-/* Purpose : Initialize sequence, memory allocation if necessary. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeed, AVC_FAIL if fail. */
-/* Modified : */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecSeqParamSet(AVCHandle *avcHandle, uint8 *nal_unit,
- int nal_size)
-{
- AVCDec_Status status;
- AVCDecObject *decvid;
- AVCCommonObj *video;
- AVCDecBitstream *bitstream;
- void *userData = avcHandle->userData;
- bool first_seq = FALSE;
- int i;
-
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "PVAVCDecSeqParamSet", -1, -1);
-
- if (avcHandle->AVCObject == NULL)
- {
- first_seq = TRUE;
-
- //avcHandle->memory_usage = 0;
- /* allocate AVCDecObject */
- avcHandle->AVCObject = (void*)avcHandle->CBAVC_Malloc(userData, sizeof(AVCDecObject), 0/*DEFAULT_ATTR*/);
- if (avcHandle->AVCObject == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- decvid = (AVCDecObject*) avcHandle->AVCObject;
-
- memset(decvid, 0, sizeof(AVCDecObject));
-
- decvid->common = (AVCCommonObj*)avcHandle->CBAVC_Malloc(userData, sizeof(AVCCommonObj), 0);
- if (decvid->common == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- video = decvid->common;
- memset(video, 0, sizeof(AVCCommonObj));
-
- video->seq_parameter_set_id = 9999; /* set it to some illegal value */
-
- decvid->bitstream = (AVCDecBitstream *) avcHandle->CBAVC_Malloc(userData, sizeof(AVCDecBitstream), 1/*DEFAULT_ATTR*/);
- if (decvid->bitstream == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- decvid->bitstream->userData = avcHandle->userData; /* callback for more data */
- decvid->avcHandle = avcHandle;
- decvid->debugEnable = avcHandle->debugEnable;
- }
-
- decvid = (AVCDecObject*) avcHandle->AVCObject;
- video = decvid->common;
- bitstream = decvid->bitstream;
-
- /* check if we can reuse the memory without re-allocating it. */
- /* always check if(first_seq==TRUE) */
-
- /* Conversion from EBSP to RBSP */
- video->forbidden_bit = nal_unit[0] >> 7;
- if (video->forbidden_bit) return AVCDEC_FAIL;
- video->nal_ref_idc = (nal_unit[0] & 0x60) >> 5;
- video->nal_unit_type = (AVCNalUnitType)(nal_unit[0] & 0x1F);
-
- if (video->nal_unit_type != AVC_NALTYPE_SPS) /* not a SPS NAL */
- {
- return AVCDEC_FAIL;
- }
-
- /* Initialize bitstream structure*/
- BitstreamInit(bitstream, nal_unit + 1, nal_size - 1);
-
- /* if first_seq == TRUE, allocate the following memory */
- if (first_seq == TRUE)
- {
- video->currSeqParams = NULL; /* initialize it to NULL */
- video->currPicParams = NULL;
-
- /* There are 32 pointers to sequence param set, seqParams.
- There are 256 pointers to picture param set, picParams.*/
- for (i = 0; i < 32; i++)
- decvid->seqParams[i] = NULL;
-
- for (i = 0; i < 256; i++)
- decvid->picParams[i] = NULL;
-
- video->MbToSliceGroupMap = NULL;
-
- video->mem_mgr_ctrl_eq_5 = FALSE;
- video->newPic = TRUE;
- video->newSlice = TRUE;
- video->currPic = NULL;
- video->currFS = NULL;
- video->prevRefPic = NULL;
-
- video->mbNum = 0; // MC_Conceal
- /* Allocate sliceHdr. */
-
- video->sliceHdr = (AVCSliceHeader*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCSliceHeader), 5/*DEFAULT_ATTR*/);
- if (video->sliceHdr == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- video->decPicBuf = (AVCDecPicBuffer*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCDecPicBuffer), 3/*DEFAULT_ATTR*/);
- if (video->decPicBuf == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
- memset(video->decPicBuf, 0, sizeof(AVCDecPicBuffer));
- }
-
- /* Decode SPS, allocate video->seqParams[i] and assign video->currSeqParams */
- status = DecodeSPS(decvid, bitstream);
-
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- return AVCDEC_SUCCESS;
-}
-
-/* ======================================================================== */
-/* Function : PVAVCDecGetSeqInfo() */
-/* Date : 11/4/2003 */
-/* Purpose : Get sequence parameter info. after SPS NAL is decoded. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeed, AVC_FAIL if fail. */
-/* Modified : */
-/* 12/20/03: change input argument, use structure instead. */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetSeqInfo(AVCHandle *avcHandle, AVCDecSPSInfo *seqInfo)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- int PicWidthInMbs, PicHeightInMapUnits, FrameHeightInMbs;
-
- if (decvid == NULL || decvid->seqParams[0] == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
-
- PicWidthInMbs = decvid->seqParams[0]->pic_width_in_mbs_minus1 + 1;
- PicHeightInMapUnits = decvid->seqParams[0]->pic_height_in_map_units_minus1 + 1 ;
- FrameHeightInMbs = (2 - decvid->seqParams[0]->frame_mbs_only_flag) * PicHeightInMapUnits ;
-
- seqInfo->FrameWidth = PicWidthInMbs << 4;
- seqInfo->FrameHeight = FrameHeightInMbs << 4;
-
- seqInfo->frame_only_flag = decvid->seqParams[0]->frame_mbs_only_flag;
-
- if (decvid->seqParams[0]->frame_cropping_flag)
- {
- seqInfo->frame_crop_left = 2 * decvid->seqParams[0]->frame_crop_left_offset;
- seqInfo->frame_crop_right = seqInfo->FrameWidth - (2 * decvid->seqParams[0]->frame_crop_right_offset + 1);
-
- if (seqInfo->frame_only_flag)
- {
- seqInfo->frame_crop_top = 2 * decvid->seqParams[0]->frame_crop_top_offset;
- seqInfo->frame_crop_bottom = seqInfo->FrameHeight - (2 * decvid->seqParams[0]->frame_crop_bottom_offset + 1);
- /* Note in 7.4.2.1, there is a constraint on the value of frame_crop_left and frame_crop_top
- such that they have to be less than or equal to frame_crop_right/2 and frame_crop_bottom/2, respectively. */
- }
- else
- {
- seqInfo->frame_crop_top = 4 * decvid->seqParams[0]->frame_crop_top_offset;
- seqInfo->frame_crop_bottom = seqInfo->FrameHeight - (4 * decvid->seqParams[0]->frame_crop_bottom_offset + 1);
- /* Note in 7.4.2.1, there is a constraint on the value of frame_crop_left and frame_crop_top
- such that they have to be less than or equal to frame_crop_right/2 and frame_crop_bottom/4, respectively. */
- }
- }
- else /* no cropping flag, just give the first and last pixel */
- {
- seqInfo->frame_crop_bottom = seqInfo->FrameHeight - 1;
- seqInfo->frame_crop_right = seqInfo->FrameWidth - 1;
- seqInfo->frame_crop_top = seqInfo->frame_crop_left = 0;
- }
-
- return AVCDEC_SUCCESS;
-}
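As a quick check of the arithmetic above, a hypothetical CIF stream works out as follows (the parameter values are chosen purely for illustration):

    /* pic_width_in_mbs_minus1        = 21  ->  PicWidthInMbs    = 22
       pic_height_in_map_units_minus1 = 17  ->  FrameHeightInMbs = 18 (frame_mbs_only_flag = 1)
       FrameWidth  = 22 << 4 = 352,  FrameHeight = 18 << 4 = 288
       with frame_cropping_flag == 0 the crop window is the whole frame:
       left = top = 0, right = 351, bottom = 287                                       */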
-
-/* ======================================================================== */
-/* Function : PVAVCDecPicParamSet() */
-/* Date : 11/4/2003 */
-/* Purpose : Initialize picture */
-/* create reference picture list. */
-/* In/out : */
-/* Return : AVCDEC_SUCCESS if succeed, AVC_FAIL if fail. */
-/* Modified : */
-/* ======================================================================== */
-/**
-Since PPS doesn't contain much data, most of the picture initialization will
-be done after decoding the slice header in PVAVCDecodeSlice. */
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecPicParamSet(AVCHandle *avcHandle, uint8 *nal_unit,
- int nal_size)
-{
- AVCDec_Status status;
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecBitstream *bitstream;
-
- if (decvid == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
- bitstream = decvid->bitstream;
- /* 1. Convert EBSP to RBSP. Create bitstream structure */
- video->forbidden_bit = nal_unit[0] >> 7;
- video->nal_ref_idc = (nal_unit[0] & 0x60) >> 5;
- video->nal_unit_type = (AVCNalUnitType)(nal_unit[0] & 0x1F);
-
- if (video->nal_unit_type != AVC_NALTYPE_PPS) /* not a PPS NAL */
- {
- return AVCDEC_FAIL;
- }
-
-
- /* 2. Initialize bitstream structure*/
- BitstreamInit(bitstream, nal_unit + 1, nal_size - 1);
-
- /* 3. Decode pic_parameter_set_rbsp syntax. Allocate video->picParams[i] and assign to currPicParams */
- status = DecodePPS(decvid, video, bitstream);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-
- video->SliceGroupChangeRate = video->currPicParams->slice_group_change_rate_minus1 + 1 ;
-
- return AVCDEC_SUCCESS;
-}
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecSEI(AVCHandle *avcHandle, uint8 *nal_unit,
- int nal_size)
-{
- OSCL_UNUSED_ARG(avcHandle);
- OSCL_UNUSED_ARG(nal_unit);
- OSCL_UNUSED_ARG(nal_size);
-
- return AVCDEC_SUCCESS;
-}
-/* ======================================================================== */
-/* Function : PVAVCDecodeSlice() */
-/* Date : 11/4/2003 */
-/* Purpose : Decode one NAL unit. */
-/* In/out : */
-/* Return : See enum AVCDec_Status for return values. */
-/* Modified : */
-/* ======================================================================== */
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecodeSlice(AVCHandle *avcHandle, uint8 *buffer,
- int buf_size)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecBitstream *bitstream;
- AVCDec_Status status;
-
- if (decvid == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
- bitstream = decvid->bitstream;
-
- if (video->mem_mgr_ctrl_eq_5)
- {
- return AVCDEC_PICTURE_OUTPUT_READY; // to flush out frame buffers
- }
-
- if (video->newSlice)
- {
- /* 2. Check NAL type */
- if (buffer == NULL)
- {
- return AVCDEC_FAIL;
- }
- video->prev_nal_unit_type = video->nal_unit_type;
- video->forbidden_bit = buffer[0] >> 7;
- video->nal_ref_idc = (buffer[0] & 0x60) >> 5;
- video->nal_unit_type = (AVCNalUnitType)(buffer[0] & 0x1F);
-
-
- if (video->nal_unit_type == AVC_NALTYPE_AUD)
- {
- return AVCDEC_SUCCESS;
- }
-
- if (video->nal_unit_type != AVC_NALTYPE_SLICE &&
- video->nal_unit_type != AVC_NALTYPE_IDR)
- {
- return AVCDEC_FAIL; /* not supported */
- }
-
-
-
- if (video->nal_unit_type >= 2 && video->nal_unit_type <= 4)
- {
- return AVCDEC_FAIL; /* not supported */
- }
- else
- {
- video->slice_data_partitioning = FALSE;
- }
-
- video->newSlice = FALSE;
- /* Initialize bitstream structure*/
- BitstreamInit(bitstream, buffer + 1, buf_size - 1);
-
-
- /* 2.1 Decode Slice Header (separate function)*/
- status = DecodeSliceHeader(decvid, video, bitstream);
- if (status != AVCDEC_SUCCESS)
- {
- video->newSlice = TRUE;
- return status;
- }
-
- if (video->sliceHdr->frame_num != video->prevFrameNum || (video->sliceHdr->first_mb_in_slice < (uint)video->mbNum && video->currSeqParams->constrained_set1_flag == 1))
- {
- video->newPic = TRUE;
- if (video->numMBs > 0)
- {
- // Conceal missing MBs of previously decoded frame
- ConcealSlice(decvid, video->PicSizeInMbs - video->numMBs, video->PicSizeInMbs); // Conceal
- video->numMBs = 0;
-
- // DeblockPicture(video); // No need to deblock
-
- /* 3.2 Decoded frame reference marking. */
- /* 3.3 Put the decoded picture in output buffers */
- /* set video->mem_mgr_ctrl_eq_5 */
- AVCNalUnitType temp = video->nal_unit_type;
- video->nal_unit_type = video->prev_nal_unit_type;
- StorePictureInDPB(avcHandle, video);
- video->nal_unit_type = temp;
- video->mbNum = 0; // MC_Conceal
- return AVCDEC_PICTURE_OUTPUT_READY;
- }
- }
-
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->prevFrameNum = 0;
- video->PrevRefFrameNum = 0;
- }
-
- if (!video->currSeqParams->gaps_in_frame_num_value_allowed_flag)
- { /* no gaps allowed, frame_num has to increase by one only */
- /* if(sliceHdr->frame_num != (video->PrevRefFrameNum + 1)%video->MaxFrameNum) */
- if (video->sliceHdr->frame_num != video->PrevRefFrameNum && video->sliceHdr->frame_num != (video->PrevRefFrameNum + 1) % video->MaxFrameNum)
- {
- // Conceal missing MBs of previously decoded frame
- video->numMBs = 0;
- video->newPic = TRUE;
- video->prevFrameNum++; // FIX
- video->PrevRefFrameNum++;
- AVCNalUnitType temp = video->nal_unit_type;
- video->nal_unit_type = AVC_NALTYPE_SLICE; //video->prev_nal_unit_type;
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- video->currFS->IsOutputted = 0x01;
- video->currFS->IsReference = 3;
- video->currFS->IsLongTerm = 0;
-
- DecodePOC(video);
- /* find an empty memory from DPB and assigned to currPic */
- DPBInitPic(video, video->PrevRefFrameNum % video->MaxFrameNum);
- RefListInit(video);
- ConcealSlice(decvid, 0, video->PicSizeInMbs); // Conceal
- video->currFS->IsOutputted |= 0x02;
- //conceal frame
- /* 3.2 Decoded frame reference marking. */
- /* 3.3 Put the decoded picture in output buffers */
- /* set video->mem_mgr_ctrl_eq_5 */
- video->mbNum = 0; // Conceal
- StorePictureInDPB(avcHandle, video);
- video->nal_unit_type = temp;
-
- return AVCDEC_PICTURE_OUTPUT_READY;
- }
- }
- }
-
- if (video->newPic == TRUE)
- {
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- }
-
- video->newSlice = TRUE;
-
- /* function pointer setting at slice-level */
- // OPTIMIZE
- decvid->residual_block = &residual_block_cavlc;
-
- /* derive picture order count */
- if (video->newPic == TRUE)
- {
- video->numMBs = video->PicSizeInMbs;
-
- if (video->nal_unit_type != AVC_NALTYPE_IDR && video->currSeqParams->gaps_in_frame_num_value_allowed_flag)
- {
- if (video->sliceHdr->frame_num != (video->PrevRefFrameNum + 1) % video->MaxFrameNum)
- {
- status = fill_frame_num_gap(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- video->numMBs = 0;
- return status;
- }
-
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS)
- {
- video->numMBs = 0;
- return status;
- }
-
-
- }
- }
- /* if there's a gap in frame_num, we have to fill in the gap with
- imaginary frames that won't get used for short-term ref. */
- /* see fill_frame_num_gap() in JM */
-
-
- DecodePOC(video);
- /* find an empty memory from DPB and assigned to currPic */
- DPBInitPic(video, video->CurrPicNum);
-
- video->currPic->isReference = TRUE; // FIX
-
- if (video->nal_ref_idc == 0)
- {
- video->currPic->isReference = FALSE;
- video->currFS->IsOutputted |= 0x02; /* The MASK 0x02 means not needed for reference, or returned */
- /* no need to check for freeing of this buffer */
- }
-
- FMOInit(video);
-
- if (video->currPic->isReference)
- {
- video->PrevRefFrameNum = video->sliceHdr->frame_num;
- }
-
-
- video->prevFrameNum = video->sliceHdr->frame_num;
- }
-
- video->newPic = FALSE;
-
-
- /* Initialize refListIdx for this picture */
- RefListInit(video);
-
- /* Re-order the reference list according to the ref_pic_list_reordering() */
- status = (AVCDec_Status)ReOrderList(video);
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
-
- /* 2.2 Decode Slice. */
- status = (AVCDec_Status)DecodeSlice(decvid);
-
- video->slice_id++; // slice
-
- if (status == AVCDEC_PICTURE_READY)
- {
- /* 3. Check complete picture */
-#ifndef MB_BASED_DEBLOCK
- /* 3.1 Deblock */
- DeblockPicture(video);
-#endif
- /* 3.2 Decoded frame reference marking. */
- /* 3.3 Put the decoded picture in output buffers */
- /* set video->mem_mgr_ctrl_eq_5 */
- status = (AVCDec_Status)StorePictureInDPB(avcHandle, video); // CHECK: check the return status
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
-
- if (video->mem_mgr_ctrl_eq_5)
- {
- video->PrevRefFrameNum = 0;
- video->prevFrameNum = 0;
- video->prevPicOrderCntMsb = 0;
- video->prevPicOrderCntLsb = video->TopFieldOrderCnt;
- video->prevFrameNumOffset = 0;
- }
- else
- {
- video->prevPicOrderCntMsb = video->PicOrderCntMsb;
- video->prevPicOrderCntLsb = video->sliceHdr->pic_order_cnt_lsb;
- video->prevFrameNumOffset = video->FrameNumOffset;
- }
-
- return AVCDEC_PICTURE_READY;
- }
- else if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
-
- return AVCDEC_SUCCESS;
-}
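A hedged sketch of how one NAL unit might be dispatched across the entry points in this file, assuming handle, nal and nal_size were obtained as shown earlier (error handling trimmed; this is an editorial illustration, not the original test harness):

    int nal_type, nal_ref_idc;
    if (PVAVCDecGetNALType(nal, nal_size, &nal_type, &nal_ref_idc) == AVCDEC_SUCCESS)
    {
        switch (nal_type)
        {
            case AVC_NALTYPE_SPS:
                PVAVCDecSeqParamSet(handle, nal, nal_size);
                break;
            case AVC_NALTYPE_PPS:
                PVAVCDecPicParamSet(handle, nal, nal_size);
                break;
            case AVC_NALTYPE_SLICE:
            case AVC_NALTYPE_IDR:
                /* AVCDEC_PICTURE_READY / AVCDEC_PICTURE_OUTPUT_READY mean a
                   picture can be fetched with PVAVCDecGetOutput()              */
                PVAVCDecodeSlice(handle, nal, nal_size);
                break;
            default:
                break;                        /* SEI, AUD, ...                  */
        }
    }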
-
-/* ======================================================================== */
-/* Function : PVAVCDecGetOutput() */
-/* Date : 11/3/2003 */
-/* Purpose : Get the next picture according to PicOrderCnt. */
-/* In/out : */
-/* Return : AVCFrameIO structure */
-/* Modified : */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF AVCDec_Status PVAVCDecGetOutput(AVCHandle *avcHandle, int *indx, int *release, AVCFrameIO *output)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecPicBuffer *dpb;
- AVCFrameStore *oldestFrame = NULL;
- int i, first = 1;
- int count_frame = 0;
- int index = 0;
- int min_poc = 0;
-
- if (decvid == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- video = decvid->common;
- dpb = video->decPicBuf;
-
- if (dpb->num_fs == 0)
- {
- return AVCDEC_FAIL;
- }
-
- /* search for the picture with the smallest PicOrderCnt in the dpb */
- /* for an extension to field decoding, we would have to search every top_field/bottom_field within
- each frame in the dpb. This code only works for frame-based decoding. */
-
- if (video->mem_mgr_ctrl_eq_5 == FALSE)
- {
- for (i = 0; i < dpb->num_fs; i++)
- {
- if ((dpb->fs[i]->IsOutputted & 0x01) == 0)
- {
- count_frame++;
- if (first)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- first = 0;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- if (dpb->fs[i]->PicOrderCnt < min_poc)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- }
- }
- }
- else
- {
- for (i = 0; i < dpb->num_fs; i++)
- {
- if ((dpb->fs[i]->IsOutputted & 0x01) == 0 && dpb->fs[i] != video->currFS)
- {
- count_frame++;
- if (first)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- first = 0;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- if (dpb->fs[i]->PicOrderCnt < min_poc)
- {
- min_poc = dpb->fs[i]->PicOrderCnt;
- oldestFrame = dpb->fs[i];
- index = i;
- }
- }
- }
-
- if (count_frame < 2 && video->nal_unit_type != AVC_NALTYPE_IDR)
- {
- video->mem_mgr_ctrl_eq_5 = FALSE; // FIX
- }
- else if (count_frame < 1 && video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- for (i = 0; i < dpb->num_fs; i++)
- {
- if (dpb->fs[i] == video->currFS && (dpb->fs[i]->IsOutputted & 0x01) == 0)
- {
- oldestFrame = dpb->fs[i];
- index = i;
- break;
- }
- }
- video->mem_mgr_ctrl_eq_5 = FALSE;
- }
- }
-
- if (oldestFrame == NULL)
- {
-
- /* Check for Mem_mgmt_operation_5 based forced output */
- for (i = 0; i < dpb->num_fs; i++)
- {
- /* looking for one that is not used for reference and has been outputted */
- if (dpb->fs[i]->IsReference == 0 && dpb->fs[i]->IsOutputted == 3)
- {
- break;
- }
- }
- if (i < dpb->num_fs)
- {
- /* there are frames available for decoding */
- return AVCDEC_FAIL; /* no frame to be outputted */
- }
-
-
- /* no free frame available, we have to release one to continue decoding */
- int MinIdx = 0;
- int32 MinFrameNumWrap = 0x7FFFFFFF;
-
- for (i = 0; i < dpb->num_fs; i++)
- {
- if (dpb->fs[i]->IsReference && !dpb->fs[i]->IsLongTerm)
- {
- if (dpb->fs[i]->FrameNumWrap < MinFrameNumWrap)
- {
- MinFrameNumWrap = dpb->fs[i]->FrameNumWrap;
- MinIdx = i;
- }
- }
- }
- /* mark the frame with the smallest FrameNumWrap as unused for reference */
- dpb->fs[MinIdx]->IsReference = 0;
- dpb->fs[MinIdx]->IsLongTerm = 0;
- dpb->fs[MinIdx]->frame.isReference = FALSE;
- dpb->fs[MinIdx]->frame.isLongTerm = FALSE;
- dpb->fs[MinIdx]->IsOutputted |= 0x02;
-#ifdef PV_MEMORY_POOL
- if (dpb->fs[MinIdx]->IsOutputted == 3)
- {
- avcHandle->CBAVC_FrameUnbind(avcHandle->userData, MinIdx);
- }
-#endif
- return AVCDEC_FAIL;
- }
- /* MASK 0x01 means the frame is outputted (for display). A frame gets freed when it is
- outputted (0x01) and not needed for reference (0x02) */
- oldestFrame->IsOutputted |= 0x01;
-
- if (oldestFrame->IsOutputted == 3)
- {
- *release = 1; /* flag to release the buffer */
- }
- else
- {
- *release = 0;
- }
- /* do not release buffer here, release it after it is sent to the sink node */
-
- output->YCbCr[0] = oldestFrame->frame.Sl;
- output->YCbCr[1] = oldestFrame->frame.Scb;
- output->YCbCr[2] = oldestFrame->frame.Scr;
- output->height = oldestFrame->frame.height;
- output->pitch = oldestFrame->frame.width;
- output->disp_order = oldestFrame->PicOrderCnt;
- output->coding_order = oldestFrame->FrameNum;
- output->id = (uint32) oldestFrame->base_dpb; /* use the pointer as the id */
- *indx = index;
-
-
-
- return AVCDEC_SUCCESS;
-}
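A hypothetical drain loop built on this function (illustrative only; how the released slot is actually returned to the decoder depends on the application's buffer callbacks):

    AVCFrameIO frame;
    int idx, release;
    while (PVAVCDecGetOutput(handle, &idx, &release, &frame) == AVCDEC_SUCCESS)
    {
        /* frame.YCbCr[0..2], frame.pitch and frame.height describe the picture */
        if (release)
        {
            /* slot idx is no longer needed by the decoder and can be recycled,
               e.g. through the application's CBAVC_FrameUnbind callback         */
        }
    }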
-
-
-/* ======================================================================== */
-/* Function : PVAVCDecReset() */
-/* Date : 03/04/2004 */
-/* Purpose : Reset decoder, prepare it for a new IDR frame. */
-/* In/out : */
-/* Return : void */
-/* Modified : */
-/* ======================================================================== */
-OSCL_EXPORT_REF void PVAVCDecReset(AVCHandle *avcHandle)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- AVCDecPicBuffer *dpb;
- int i;
-
- if (decvid == NULL)
- {
- return;
- }
-
- video = decvid->common;
- dpb = video->decPicBuf;
-
- /* reset the DPB */
-
-
- for (i = 0; i < dpb->num_fs; i++)
- {
- dpb->fs[i]->IsLongTerm = 0;
- dpb->fs[i]->IsReference = 0;
- dpb->fs[i]->IsOutputted = 3;
- dpb->fs[i]->frame.isReference = 0;
- dpb->fs[i]->frame.isLongTerm = 0;
- }
-
- video->mem_mgr_ctrl_eq_5 = FALSE;
- video->newPic = TRUE;
- video->newSlice = TRUE;
- video->currPic = NULL;
- video->currFS = NULL;
- video->prevRefPic = NULL;
- video->prevFrameNum = 0;
- video->PrevRefFrameNum = 0;
- video->prevFrameNumOffset = 0;
- video->FrameNumOffset = 0;
- video->mbNum = 0;
- video->numMBs = 0;
-
- return ;
-}
-
-
-/* ======================================================================== */
-/* Function : PVAVCCleanUpDecoder() */
-/* Date : 11/4/2003 */
-/* Purpose : Clean up the decoder, free all allocated memory. */
-/* In/out : */
-/* Return : void */
-/* Modified : */
-/* ======================================================================== */
-
-OSCL_EXPORT_REF void PVAVCCleanUpDecoder(AVCHandle *avcHandle)
-{
- AVCDecObject *decvid = (AVCDecObject*) avcHandle->AVCObject;
- AVCCommonObj *video;
- void *userData = avcHandle->userData;
- int i;
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "PVAVCCleanUpDecoder", -1, -1);
-
- if (decvid != NULL)
- {
- video = decvid->common;
- if (video != NULL)
- {
- if (video->MbToSliceGroupMap != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->MbToSliceGroupMap);
- }
-
-#ifdef MB_BASED_DEBLOCK
- if (video->intra_pred_top != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->intra_pred_top);
- }
- if (video->intra_pred_top_cb != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->intra_pred_top_cb);
- }
- if (video->intra_pred_top_cr != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->intra_pred_top_cr);
- }
-#endif
- if (video->mblock != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->mblock);
- }
-
- if (video->decPicBuf != NULL)
- {
- CleanUpDPB(avcHandle, video);
- avcHandle->CBAVC_Free(userData, (int)video->decPicBuf);
- }
-
- if (video->sliceHdr != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)video->sliceHdr);
- }
-
- avcHandle->CBAVC_Free(userData, (int)video); /* last thing to do */
-
- }
-
- for (i = 0; i < 256; i++)
- {
- if (decvid->picParams[i] != NULL)
- {
- if (decvid->picParams[i]->slice_group_id != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)decvid->picParams[i]->slice_group_id);
- }
- avcHandle->CBAVC_Free(userData, (int)decvid->picParams[i]);
- }
- }
- for (i = 0; i < 32; i++)
- {
- if (decvid->seqParams[i] != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)decvid->seqParams[i]);
- }
- }
- if (decvid->bitstream != NULL)
- {
- avcHandle->CBAVC_Free(userData, (int)decvid->bitstream);
- }
-
-
- avcHandle->CBAVC_Free(userData, (int)decvid);
- }
-
-
- return ;
-}
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_bitstream.h b/media/libstagefright/codecs/avc/dec/src/avcdec_bitstream.h
deleted file mode 100644
index bd1bc59..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_bitstream.h
+++ /dev/null
@@ -1,125 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains bitstream related functions.
-@publishedAll
-*/
-
-#ifndef _AVCDEC_BITSTREAM_H_
-#define _AVCDEC_BITSTREAM_H_
-
-#include "avcdec_lib.h"
-
-#define WORD_SIZE 32 /* this can vary, default to 32 bit for now */
-
-#ifndef __cplusplus
-
-#define AVC_GETDATA(x,y) userData->AVC_GetData(x,y)
-
-#endif
-
-#ifdef __cplusplus
-extern "C"
-{
-#endif
-#define BitstreamFlushBits(A,B) {(A)->bitcnt += (B); (A)->incnt -= (B); (A)->curr_word <<= (B);}
-
- AVCDec_Status AVC_BitstreamFillCache(AVCDecBitstream *stream);
- /**
- This function populates bitstream structure.
- \param "stream" "Pointer to bitstream structure."
- \param "buffer" "Pointer to the bitstream buffer."
- \param "size" "Size of the buffer."
- \param "nal_size" "Size of the NAL unit."
- \param "resetall" "Flag for reset everything."
- \return "AVCDEC_SUCCESS for success and AVCDEC_FAIL for fail."
- */
- AVCDec_Status BitstreamInit(AVCDecBitstream *stream, uint8 *buffer, int size);
-
- /**
- This function reads next aligned word and remove the emulation prevention code
- if necessary.
- \param "stream" "Pointer to bitstream structure."
- \return "Next word."
- */
- uint BitstreamNextWord(AVCDecBitstream *stream);
-
- /**
- This function reads nBits bits from the current position and advance the pointer.
- \param "stream" "Pointer to bitstream structure."
- \param "nBits" "Number of bits to be read."
- \param "code" "Point to the read value."
- \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
- is greater than the word-size, AVCDEC_PACKET_LOSS or
- AVCDEC_NO_DATA if callback to get data fails."
- */
- AVCDec_Status BitstreamReadBits(AVCDecBitstream *stream, int nBits, uint *code);
-
- /**
- This function shows nBits bits from the current position without advancing the pointer.
- \param "stream" "Pointer to bitstream structure."
- \param "nBits" "Number of bits to be read."
- \param "code" "Point to the read value."
- \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
- is greater than the word-size, AVCDEC_NO_DATA if it needs
- to callback to get data."
- */
- AVCDec_Status BitstreamShowBits(AVCDecBitstream *stream, int nBits, uint *code);
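An illustrative peek-then-consume pattern using the two calls above together with the BitstreamFlushBits macro (a sketch only; it assumes a stream already set up with BitstreamInit):

    uint bits;
    if (BitstreamShowBits(stream, 8, &bits) == AVCDEC_SUCCESS)
    {
        /* inspect the next 8 bits without moving the read position ...        */
        BitstreamFlushBits(stream, 8);        /* ... then actually consume them */
    }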
-
-
- /**
- This function flushes nBits bits from the current position.
- \param "stream" "Pointer to bitstream structure."
- \param "nBits" "Number of bits to be read."
- \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
- is greater than the word-size. It will not call back to get
- more data. Users should call BitstreamShowBits to determine
- how much they want to flush."
- */
-
- /**
- This function reads 1 bit from the current position and advances the pointer.
- \param "stream" "Pointer to bitstream structure."
- \param "code" "Point to the read value."
- \return "AVCDEC_SUCCESS if successful, AVCDEC_FAIL if number of bits
- is greater than the word-size, AVCDEC_PACKET_LOSS or
- AVCDEC_NO_DATA if callback to get data fails."
- */
- AVCDec_Status BitstreamRead1Bit(AVCDecBitstream *stream, uint *code);
-
- /**
- This function checks whether the current bit position is byte-aligned or not.
- \param "stream" "Pointer to the bitstream structure."
- \return "TRUE if byte-aligned, FALSE otherwise."
- */
- bool byte_aligned(AVCDecBitstream *stream);
- AVCDec_Status BitstreamByteAlign(AVCDecBitstream *stream);
- /**
- This function checks whether there are more RBSP data before the trailing bits.
- \param "stream" "Pointer to the bitstream structure."
- \return "TRUE if yes, FALSE otherwise."
- */
- bool more_rbsp_data(AVCDecBitstream *stream);
-
-
-#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-#endif /* _AVCDEC_BITSTREAM_H_ */
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_int.h b/media/libstagefright/codecs/avc/dec/src/avcdec_int.h
deleted file mode 100644
index 878f9b3..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_int.h
+++ /dev/null
@@ -1,88 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains application function interfaces to the AVC decoder library
-and the necessary type definitions and enumerations.
-Naming convention for variables:
-lower_case_with_under_line is a syntax element in subclauses 7.2 and 7.3
-noUnderLine or NoUnderLine are derived variables defined somewhere else in the draft
- or introduced by this decoder library.
-@publishedAll
-*/
-
-#ifndef _AVCDEC_INT_H_
-#define _AVCDEC_INT_H_
-
-#include "avcint_common.h"
-#include "avcdec_api.h"
-
-
-/**
-Bitstream structure contains bitstream related parameters such as the pointer
-to the buffer, the current byte position and bit position.
-@publishedAll
-*/
-typedef struct tagDecBitstream
-{
- uint8 *bitstreamBuffer; /* pointer to buffer memory */
- int nal_size; /* size of the current NAL unit */
- int data_end_pos; /* bitstreamBuffer size in bytes */
- int read_pos; /* next position to read from bitstreamBuffer */
- uint curr_word; /* byte-swapped (MSB left) current word read from buffer */
- int bit_left; /* number of bit left in current_word */
- uint next_word; /* in case old data in the previous buffer hasn't been flushed. */
- int incnt; /* bit left in the prev_word */
- int incnt_next;
- int bitcnt;
- void *userData;
-} AVCDecBitstream;
-
-/**
-This structure is the main object for AVC decoder library providing access to all
-global variables. It is allocated at PVAVCInitDecoder and freed at PVAVCCleanUpDecoder.
-@publishedAll
-*/
-typedef struct tagDecObject
-{
-
- AVCCommonObj *common;
-
- AVCDecBitstream *bitstream; /* for current NAL */
-
- /* sequence parameter set */
- AVCSeqParamSet *seqParams[32]; /* Array of pointers, get allocated at arrival of new seq_id */
-
- /* picture parameter set */
- AVCPicParamSet *picParams[256]; /* Array of pointers to picture param set structures */
-
- /* For internal operation, scratch memory for MV, prediction, transform, etc.*/
- uint ref_idx_l0[4]; /* [mbPartIdx], te(v) */
- uint ref_idx_l1[4];
-
- /* function pointers */
- AVCDec_Status(*residual_block)(struct tagDecObject*, int, int,
- int *, int *, int *);
- /* Application control data */
- AVCHandle *avcHandle;
- void (*AVC_DebugLog)(AVCLogType type, char *string1, char *string2);
- /*bool*/
- uint debugEnable;
-
-} AVCDecObject;
-
-#endif /* _AVCDEC_INT_H_ */
diff --git a/media/libstagefright/codecs/avc/dec/src/avcdec_lib.h b/media/libstagefright/codecs/avc/dec/src/avcdec_lib.h
deleted file mode 100644
index fdead05..0000000
--- a/media/libstagefright/codecs/avc/dec/src/avcdec_lib.h
+++ /dev/null
@@ -1,555 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/**
-This file contains declarations of internal functions for AVC decoder library.
-@publishedAll
-*/
-#ifndef _AVCDEC_LIB_H_
-#define _AVCDEC_LIB_H_
-
-#include "avclib_common.h"
-#include "avcdec_int.h"
-
-/*----------- avcdec_api.c -------------*/
-/**
-This function takes out the emulation prevention bytes from the input to create the RBSP.
-The result is written over the input bitstream.
-\param "nal_unit" "(I/O) Pointer to the input buffer."
-\param "size" "(I/O) Pointer to the size of the input/output buffer."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status EBSPtoRBSP(uint8 *nal_unit, int *size);
-
-/*------------- pred_intra.c ---------------*/
-/**
-This function is the main entry point to intra prediction operation on a
-macroblock.
-\param "video" "Pointer to AVCCommonObj."
-*/
-AVCStatus IntraMBPrediction(AVCCommonObj *video);
-
-void SaveNeighborForIntraPred(AVCCommonObj *video, int offset);
-
-AVCStatus Intra_4x4(AVCCommonObj *video, int component, int SubBlock_indx, uint8 *comp);
-void Intra_4x4_Vertical(AVCCommonObj *video, int block_offset);
-void Intra_4x4_Horizontal(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_DC(AVCCommonObj *video, int pitch, int block_offset, AVCNeighborAvailability *availability);
-void Intra_4x4_Down_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability);
-void Intra_4x4_Diagonal_Down_Right(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_Diagonal_Vertical_Right(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_Diagonal_Horizontal_Down(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_4x4_Vertical_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability);
-void Intra_4x4_Horizontal_Up(AVCCommonObj *video, int pitch, int block_offset);
-void Intra_16x16_Vertical(AVCCommonObj *video);
-void Intra_16x16_Horizontal(AVCCommonObj *video, int pitch);
-void Intra_16x16_DC(AVCCommonObj *video, int pitch);
-void Intra_16x16_Plane(AVCCommonObj *video, int pitch);
-void Intra_Chroma_DC(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
-void Intra_Chroma_Horizontal(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
-void Intra_Chroma_Vertical(AVCCommonObj *video, uint8 *predCb, uint8 *predCr);
-void Intra_Chroma_Plane(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr);
-
-/*------------ pred_inter.c ---------------*/
-/**
-This function is the main entrance to inter prediction operation for
-a macroblock. For decoding, this function also calls inverse transform and
-compensation.
-\param "video" "Pointer to AVCCommonObj."
-\return "void"
-*/
-void InterMBPrediction(AVCCommonObj *video);
-
-/**
-This function is called for luma motion compensation.
-\param "ref" "Pointer to the origin of a reference luma."
-\param "picwidth" "Width of the picture."
-\param "picheight" "Height of the picture."
-\param "x_pos" "X-coordinate of the predicted block in quarter pel resolution."
-\param "y_pos" "Y-coordinate of the predicted block in quarter pel resolution."
-\param "pred" "Pointer to the output predicted block."
-\param "pred_pitch" "Width of pred."
-\param "blkwidth" "Width of the current partition."
-\param "blkheight" "Height of the current partition."
-\return "void"
-*/
-void LumaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos,
- uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight);
-
-/**
-Functions below are special cases for luma motion compensation.
-LumaFullPelMC is for full pixel motion compensation.
-LumaBorderMC is for interpolation in only one dimension.
-LumaCrossMC is for interpolation in one dimension and half point in the other dimension.
-LumaDiagonalMC is for interpolation in diagonal direction.
-
-\param "ref" "Pointer to the origin of a reference luma."
-\param "picwidth" "Width of the picture."
-\param "picheight" "Height of the picture."
-\param "x_pos" "X-coordinate of the predicted block in full pel resolution."
-\param "y_pos" "Y-coordinate of the predicted block in full pel resolution."
-\param "dx" "Fraction of x_pos in quarter pel."
-\param "dy" "Fraction of y_pos in quarter pel."
-\param "curr" "Pointer to the current partition in the current picture."
-\param "residue" "Pointer to the current partition for the residue block."
-\param "blkwidth" "Width of the current partition."
-\param "blkheight" "Height of the current partition."
-\return "void"
-*/
-void CreatePad(uint8 *ref, int picwidth, int picheight, int x_pos, int y_pos,
- uint8 *out, int blkwidth, int blkheight);
-
-void FullPelMC(uint8 *in, int inwidth, uint8 *out, int outpitch,
- int blkwidth, int blkheight);
-
-void HorzInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx);
-
-void HorzInterp2MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx);
-
-void HorzInterp3MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight);
-
-void VertInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy);
-
-void VertInterp2MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight);
-
-void VertInterp3MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy);
-
-void DiagonalInterpMC(uint8 *in1, uint8 *in2, int inpitch,
- uint8 *out, int outpitch,
- int blkwidth, int blkheight);
-
-
-void ChromaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos, uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight);
-
-void ChromaFullPelMC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight) ;
-void ChromaBorderMC(uint8 *ref, int picwidth, int dx, int dy,
- uint8 *pred, int pred_pitch, int blkwidth, int blkheight);
-void ChromaDiagonalMC(uint8 *ref, int picwidth, int dx, int dy,
- uint8 *pred, int pred_pitch, int blkwidth, int blkheight);
-
-void ChromaFullPelMCOutside(uint8 *ref, uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight, int x_inc,
- int y_inc0, int y_inc1, int x_mid, int y_mid);
-void ChromaBorderMCOutside(uint8 *ref, int picwidth, int dx, int dy,
- uint8 *pred, int pred_pitch, int blkwidth, int blkheight,
- int x_inc, int z_inc, int y_inc0, int y_inc1, int x_mid, int y_mid);
-void ChromaDiagonalMCOutside(uint8 *ref, int picwidth,
- int dx, int dy, uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight, int x_inc, int z_inc,
- int y_inc0, int y_inc1, int x_mid, int y_mid);
-
-void ChromaDiagonalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaHorizontalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaVerticalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaFullMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaVerticalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaHorizontalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-void ChromaDiagonalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight);
-
-
-/*----------- slice.c ---------------*/
-/**
-This function performs the main decoding loop for slice data including
-INTRA/INTER prediction, transform, quantization, and compensation.
-See decode_frame_slice() in JM.
-\param "video" "Pointer to AVCDecObject."
-\return "AVCDEC_SUCCESS for success, AVCDEC_PICTURE_READY for end-of-picture and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status DecodeSlice(AVCDecObject *video);
-AVCDec_Status ConcealSlice(AVCDecObject *decvid, int mbnum_start, int mbnum_end);
-/**
-This function performs the decoding of one macroblock.
-\param "video" "Pointer to AVCDecObject."
-\param "prevMbSkipped" "A value derived in 7.3.4."
-\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status DecodeMB(AVCDecObject *video);
-
-/**
-This function performs macroblock prediction type decoding as in subclause 7.3.5.1.
-\param "video" "Pointer to AVCCommonObj."
-\param "currMB" "Pointer to the current macroblock."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
-
-/**
-This function performs sub-macroblock prediction type decoding as in subclause 7.3.5.2.
-\param "video" "Pointer to AVCCommonObj."
-\param "currMB" "Pointer to the current macroblock."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS for success or AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
-
-/**
-This function interprets the mb_type and sets necessary information
-in the macroblock structure when the slice type is AVC_I_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeI(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the mb_type and sets necessary information
-in the macroblock structure when the slice type is AVC_P_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeP(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the mb_type and sets necessary information
-in the macroblock structure when the slice type is AVC_B_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeB(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the mb_type and sets necessary information
-in the macroblock structure when the slice type is AVC_SI_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretMBModeSI(AVCMacroblock *mblock, uint mb_type);
-
-/**
-This function interprets the sub_mb_type and sets necessary information
-in the macroblock structure when the slice type is AVC_P_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "sub_mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretSubMBModeP(AVCMacroblock *mblock, uint *sub_mb_type);
-
-/**
-This function interprets the sub_mb_type and sets necessary information
-in the macroblock structure when the slice type is AVC_B_SLICE.
-\param "mblock" "Pointer to current AVCMacroblock."
-\param "sub_mb_type" "From the syntax bitstream."
-\return "void"
-*/
-void InterpretSubMBModeB(AVCMacroblock *mblock, uint *sub_mb_type);
-
-/**
-This function decodes the Intra4x4 prediction mode from neighboring information
-and from the decoded syntax.
-\param "video" "Pointer to AVCCommonObj."
-\param "currMB" "Pointer to current macroblock."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status DecodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream);
-
-/*----------- vlc.c -------------------*/
-/**
-This function reads and decodes Exp-Golomb codes.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "codeNum" "Pointer to the value of the codeNum."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status ue_v(AVCDecBitstream *bitstream, uint *codeNum);
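A worked ue(v) example for the declaration above (the bit pattern is hypothetical; the mapping itself is the standard Exp-Golomb rule):

    /* input bits 0 0 1 0 1: two leading zeros (M = 2), the marker '1',
       then info bits '01' = 1, so codeNum = 2^M - 1 + info = 3 + 1 = 4   */
    uint codeNum;
    if (ue_v(stream, &codeNum) == AVCDEC_SUCCESS)
    {
        /* for the bits above, codeNum comes back as 4 */
    }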
-
-/**
-This function reads and decodes signed Exp-Golomb codes.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "value" "Pointer to syntax element value."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status se_v(AVCDecBitstream *bitstream, int *value);
-
-/**
-This function reads and decodes signed Exp-Golomb codes for
-32 bit codeword.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "value" "Pointer to syntax element value."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status se_v32bit(AVCDecBitstream *bitstream, int32 *value);
-
-/**
-This function reads and decodes truncated Exp-Golomb codes.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "value" "Pointer to syntax element value."
-\param "range" "Range of the value as input to determine the algorithm."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status te_v(AVCDecBitstream *bitstream, uint *value, uint range);
-
-/**
-This function parses an Exp-Golomb code from the bitstream.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "leadingZeros" "Pointer to the number of leading zeros."
-\param "infobits" "Pointer to the value after leading zeros and the first one.
- The total number of bits read is 2*leadingZeros + 1."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status GetEGBitstring(AVCDecBitstream *bitstream, int *leadingZeros, int *infobits);
-
-/**
-This function parses an Exp-Golomb code from the bitstream for 32-bit codewords.
-\param "bitstream" "Pointer to AVCDecBitstream."
-\param "leadingZeros" "Pointer to the number of leading zeros."
-\param "infobits" "Pointer to the value after leading zeros and the first one.
- The total number of bits read is 2*leadingZeros + 1."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status GetEGBitstring32bit(AVCDecBitstream *bitstream, int *leadingZeros, uint32 *infobits);
-
-/**
-This function performs CAVLC decoding of the CBP (coded block pattern) of a macroblock
-by calling ue_v() and then mapping the codeNum to the corresponding CBP value.
-\param "currMB" "Pointer to the current AVCMacroblock structure."
-\param "stream" "Pointer to the AVCDecBitstream."
-\return "void"
-*/
-AVCDec_Status DecodeCBP(AVCMacroblock *currMB, AVCDecBitstream *stream);
-
-/**
-This function decodes the syntax for trailing ones and total coefficient.
-Subject to optimization.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "TrailingOnes" "Pointer to the trailing one variable output."
-\param "TotalCoeff" "Pointer to the total coefficient variable output."
-\param "nC" "Context for number of nonzero coefficient (prediction context)."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalCoeffTrailingOnes(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff, int nC);
-
-/**
-This function decodes the syntax for trailing ones and total coefficient for
-chroma DC block. Subject to optimization.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "TrailingOnes" "Pointer to the trailing one variable output."
-\param "TotalCoeff" "Pointer to the total coefficient variable output."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalCoeffTrailingOnesChromaDC(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff);
-
-/**
-This function decodes a VLC table with two outputs.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "lentab" "Table for code length."
-\param "codtab" "Table for code value."
-\param "tabwidth" "Width of the table or alphabet size of the first output."
-\param "tabheight" "Height of the table or alphabet size of the second output."
-\param "code1" "Pointer to the first output."
-\param "code2" "Pointer to the second output."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status code_from_bitstream_2d(AVCDecBitstream *stream, int *lentab, int *codtab, int tabwidth,
- int tabheight, int *code1, int *code2);
-
-/**
-This function decodes the level_prefix VLC value as in Table 9-6.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_LevelPrefix(AVCDecBitstream *stream, uint *code);
-
-/**
-This function decodes total_zeros VLC syntax as in Table 9-7 and 9-8.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\param "TotalCoeff" "Context parameter."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalZeros(AVCDecBitstream *stream, int *code, int TotalCoeff);
-
-/**
-This function decodes total_zeros VLC syntax for chroma DC as in Table 9-9.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\param "TotalCoeff" "Context parameter."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_TotalZerosChromaDC(AVCDecBitstream *stream, int *code, int TotalCoeff);
-
-/**
-This function decodes run_before VLC syntax as in Table 9-10.
-\param "stream" "Pointer to the AVCDecBitstream."
-\param "code" "Pointer to the output."
-\param "zeroLeft" "Context parameter."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status ce_RunBefore(AVCDecBitstream *stream, int *code, int zeroLeft);
-
-/*----------- header.c -------------------*/
-/**
-This function parses vui_parameters.
-\param "decvid" "Pointer to AVCDecObject."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status vui_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCSeqParamSet *currSPS);
-AVCDec_Status sei_payload(AVCDecObject *decvid, AVCDecBitstream *stream, uint payloadType, uint payloadSize);
-
-AVCDec_Status buffering_period(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status pic_timing(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status recovery_point(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status dec_ref_pic_marking_repetition(AVCDecObject *decvid, AVCDecBitstream *stream);
-AVCDec_Status motion_constrained_slice_group_set(AVCDecObject *decvid, AVCDecBitstream *stream);
-
-
-/**
-This function parses hrd_parameters.
-\param "decvid" "Pointer to AVCDecObject."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status hrd_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCHRDParams *HRDParam);
-
-/**
-This function decodes the sequence parameter set syntax and fills in the AVCSeqParamSet
-structure.
-\param "decvid" "Pointer to AVCDecObject."
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status DecodeSPS(AVCDecObject *decvid, AVCDecBitstream *stream);
-
-/**
-This function decodes the picture parameter set syntax and fills in the AVCPicParamSet
-structure.
-\param "decvid" "Pointer to AVCDecObject."
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS or AVCDEC_FAIL."
-*/
-AVCDec_Status DecodePPS(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream);
-AVCDec_Status DecodeSEI(AVCDecObject *decvid, AVCDecBitstream *stream);
-
-/**
-This function decodes slice header, calls related functions such as
-reference picture list reordering, prediction weight table, decode ref marking.
-See FirstPartOfSliceHeader() and RestOfSliceHeader() in JM.
-\param "decvid" "Pointer to AVCDecObject."
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status DecodeSliceHeader(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream);
-
-/**
-This function performs the necessary operations to create dummy frames when
-there is a gap in frame_num.
-\param "video" "Pointer to AVCCommonObj."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status fill_frame_num_gap(AVCHandle *avcHandle, AVCCommonObj *video);
-
-/**
-This function decodes ref_pic_list_reordering related syntax and fills in the AVCSliceHeader
-structure.
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\param "sliceHdr" "Pointer to AVCSliceHdr."
-\param "slice_type" "Value of slice_type - 5 if greater than 5."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status ref_pic_list_reordering(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr, int slice_type);
-
-/**
-This function decodes dec_ref_pic_marking related syntax and fills in the AVCSliceHeader
-structure.
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-\param "sliceHdr" "Pointer to AVCSliceHdr."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-*/
-AVCDec_Status dec_ref_pic_marking(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr);
-
-/**
-This function performs POC related operation prior to decoding a picture
-\param "video" "Pointer to AVCCommonObj."
-\return "AVCDEC_SUCCESS for success and AVCDEC_FAIL otherwise."
-See also PostPOC() for initialization of some variables.
-*/
-AVCDec_Status DecodePOC(AVCCommonObj *video);
-
-
-
-/*------------ residual.c ------------------*/
-/**
-This function decodes the intra PCM data and fills it in at the corresponding location
-on the current picture.
-\param "video" "Pointer to AVCCommonObj."
-\param "stream" "Pointer to AVCDecBitstream."
-*/
-AVCDec_Status DecodeIntraPCM(AVCCommonObj *video, AVCDecBitstream *stream);
-
-/**
-This function performs residual syntax decoding as well as quantization and transformation of
-the decoded coefficients. See subclause 7.3.5.3.
-\param "video" "Pointer to AVCDecObject."
-\param "currMB" "Pointer to current macroblock."
-*/
-AVCDec_Status residual(AVCDecObject *video, AVCMacroblock *currMB);
-
-/**
-This function performs CAVLC syntax decoding to get the run and level information of the coefficients.
-\param "video" "Pointer to AVCDecObject."
-\param "type" "One of AVCResidualType for a particular 4x4 block."
-\param "bx" "Horizontal block index."
-\param "by" "Vertical block index."
-\param "level" "Pointer to array of level for output."
-\param "run" "Pointer to array of run for output."
-\param "numcoeff" "Pointer to the total number of nonzero coefficients."
-\return "AVCDEC_SUCCESS for success."
-*/
-AVCDec_Status residual_block_cavlc(AVCDecObject *video, int nC, int maxNumCoeff,
- int *level, int *run, int *numcoeff);
-
-#endif /* _AVCDEC_LIB_H_ */
diff --git a/media/libstagefright/codecs/avc/dec/src/header.cpp b/media/libstagefright/codecs/avc/dec/src/header.cpp
deleted file mode 100644
index 8681e2b..0000000
--- a/media/libstagefright/codecs/avc/dec/src/header.cpp
+++ /dev/null
@@ -1,1391 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-#include "avcdec_api.h"
-
-/** see subclause 7.4.2.1 */
-AVCDec_Status DecodeSPS(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCDec_Status status = AVCDEC_SUCCESS;
- AVCSeqParamSet *seqParam;
- uint temp;
- int i;
- uint profile_idc, constrained_set0_flag, constrained_set1_flag, constrained_set2_flag;
- uint level_idc, seq_parameter_set_id;
- void *userData = decvid->avcHandle->userData;
- AVCHandle *avcHandle = decvid->avcHandle;
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "DecodeSPS", -1, -1);
-
- BitstreamReadBits(stream, 8, &profile_idc);
- BitstreamRead1Bit(stream, &constrained_set0_flag);
-// if (profile_idc != 66 && constrained_set0_flag != 1)
-// {
-// return AVCDEC_FAIL;
-// }
- BitstreamRead1Bit(stream, &constrained_set1_flag);
- BitstreamRead1Bit(stream, &constrained_set2_flag);
- BitstreamReadBits(stream, 5, &temp);
- BitstreamReadBits(stream, 8, &level_idc);
- if (level_idc > 51)
- {
- return AVCDEC_FAIL;
- }
- if (mapLev2Idx[level_idc] == 255)
- {
- return AVCDEC_FAIL;
- }
- ue_v(stream, &seq_parameter_set_id);
-
- if (seq_parameter_set_id > 31)
- {
- return AVCDEC_FAIL;
- }
-
- /* Allocate sequence param set for seqParams[seq_parameter_set_id]. */
- if (decvid->seqParams[seq_parameter_set_id] == NULL) /* allocate seqParams[id] */
- {
- decvid->seqParams[seq_parameter_set_id] =
- (AVCSeqParamSet*) avcHandle->CBAVC_Malloc(userData, sizeof(AVCSeqParamSet), DEFAULT_ATTR);
-
- if (decvid->seqParams[seq_parameter_set_id] == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "done alloc seqParams", -1, -1);
-
- seqParam = decvid->seqParams[seq_parameter_set_id];
-
- seqParam->profile_idc = profile_idc;
- seqParam->constrained_set0_flag = constrained_set0_flag;
- seqParam->constrained_set1_flag = constrained_set1_flag;
- seqParam->constrained_set2_flag = constrained_set2_flag;
- seqParam->level_idc = level_idc;
- seqParam->seq_parameter_set_id = seq_parameter_set_id;
-
- /* continue decoding SPS */
- ue_v(stream, &(seqParam->log2_max_frame_num_minus4));
-
- if (seqParam->log2_max_frame_num_minus4 > 12)
- {
- return AVCDEC_FAIL;
- }
-
- ue_v(stream, &(seqParam->pic_order_cnt_type));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 1", seqParam->log2_max_frame_num_minus4, seqParam->pic_order_cnt_type);
-
- if (seqParam->pic_order_cnt_type == 0)
- {
- ue_v(stream, &(seqParam->log2_max_pic_order_cnt_lsb_minus4));
- }
- else if (seqParam->pic_order_cnt_type == 1)
- { // MC_CHECK
- BitstreamRead1Bit(stream, (uint*)&(seqParam->delta_pic_order_always_zero_flag));
- se_v32bit(stream, &(seqParam->offset_for_non_ref_pic));
- se_v32bit(stream, &(seqParam->offset_for_top_to_bottom_field));
- ue_v(stream, &(seqParam->num_ref_frames_in_pic_order_cnt_cycle));
-
- for (i = 0; i < (int)(seqParam->num_ref_frames_in_pic_order_cnt_cycle); i++)
- {
- se_v32bit(stream, &(seqParam->offset_for_ref_frame[i]));
- }
- }
-
- ue_v(stream, &(seqParam->num_ref_frames));
-
- if (seqParam->num_ref_frames > 16)
- {
- return AVCDEC_FAIL;
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 2", seqParam->num_ref_frames, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->gaps_in_frame_num_value_allowed_flag));
- ue_v(stream, &(seqParam->pic_width_in_mbs_minus1));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "picwidth", seqParam->pic_width_in_mbs_minus1, -1);
-
- ue_v(stream, &(seqParam->pic_height_in_map_units_minus1));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "picwidth", seqParam->pic_height_in_map_units_minus1, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->frame_mbs_only_flag));
-
- seqParam->mb_adaptive_frame_field_flag = 0; /* default value */
- if (!seqParam->frame_mbs_only_flag)
- {
- BitstreamRead1Bit(stream, (uint*)&(seqParam->mb_adaptive_frame_field_flag));
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 3", seqParam->frame_mbs_only_flag, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->direct_8x8_inference_flag));
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 4", seqParam->direct_8x8_inference_flag, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->frame_cropping_flag));
- seqParam->frame_crop_left_offset = 0; /* default value */
- seqParam->frame_crop_right_offset = 0;/* default value */
- seqParam->frame_crop_top_offset = 0;/* default value */
- seqParam->frame_crop_bottom_offset = 0;/* default value */
- if (seqParam->frame_cropping_flag)
- {
- ue_v(stream, &(seqParam->frame_crop_left_offset));
- ue_v(stream, &(seqParam->frame_crop_right_offset));
- ue_v(stream, &(seqParam->frame_crop_top_offset));
- ue_v(stream, &(seqParam->frame_crop_bottom_offset));
- }
-
- DEBUG_LOG(userData, AVC_LOGTYPE_INFO, "check point 5", seqParam->frame_cropping_flag, -1);
-
- BitstreamRead1Bit(stream, (uint*)&(seqParam->vui_parameters_present_flag));
- if (seqParam->vui_parameters_present_flag)
- {
- status = vui_parameters(decvid, stream, seqParam);
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
- }
-
- return status;
-}
-
-
-AVCDec_Status vui_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCSeqParamSet *currSPS)
-{
- uint temp;
- uint temp32;
-    uint aspect_ratio_idc, overscan_appropriate_flag, video_format, video_full_range_flag;
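-    /* Note: most VUI syntax elements below are parsed into the scratch variable temp only
-       to advance the bitstream; only the HRD-related flags and pic_struct_present_flag are
-       stored in currSPS->vui_parameters. */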
- /* aspect_ratio_info_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- BitstreamReadBits(stream, 8, &aspect_ratio_idc);
- if (aspect_ratio_idc == 255)
- {
- /* sar_width */
- BitstreamReadBits(stream, 16, &temp);
- /* sar_height */
- BitstreamReadBits(stream, 16, &temp);
- }
- }
- /* overscan_info_present */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
-        BitstreamRead1Bit(stream, &overscan_appropriate_flag);
- }
- /* video_signal_type_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- BitstreamReadBits(stream, 3, &video_format);
- BitstreamRead1Bit(stream, &video_full_range_flag);
- /* colour_description_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* colour_primaries */
- BitstreamReadBits(stream, 8, &temp);
- /* transfer_characteristics */
- BitstreamReadBits(stream, 8, &temp);
- /* matrix coefficients */
- BitstreamReadBits(stream, 8, &temp);
- }
- }
- /* chroma_loc_info_present_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* chroma_sample_loc_type_top_field */
- ue_v(stream, &temp);
- /* chroma_sample_loc_type_bottom_field */
- ue_v(stream, &temp);
- }
-
- /* timing_info_present_flag*/
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* num_unit_in_tick*/
- BitstreamReadBits(stream, 32, &temp32);
- /* time_scale */
- BitstreamReadBits(stream, 32, &temp32);
- /* fixed_frame_rate_flag */
- BitstreamRead1Bit(stream, &temp);
- }
-
- /* nal_hrd_parameters_present_flag */
- BitstreamRead1Bit(stream, &temp);
- currSPS->vui_parameters.nal_hrd_parameters_present_flag = temp;
- if (temp)
- {
- hrd_parameters(decvid, stream, &(currSPS->vui_parameters.nal_hrd_parameters));
- }
- /* vcl_hrd_parameters_present_flag*/
- BitstreamRead1Bit(stream, &temp);
- currSPS->vui_parameters.vcl_hrd_parameters_present_flag = temp;
- if (temp)
- {
- hrd_parameters(decvid, stream, &(currSPS->vui_parameters.vcl_hrd_parameters));
- }
- if (currSPS->vui_parameters.nal_hrd_parameters_present_flag || currSPS->vui_parameters.vcl_hrd_parameters_present_flag)
- {
- /* low_delay_hrd_flag */
- BitstreamRead1Bit(stream, &temp);
- }
- /* pic_struct_present_flag */
- BitstreamRead1Bit(stream, &temp);
- currSPS->vui_parameters.pic_struct_present_flag = temp;
- /* bitstream_restriction_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* motion_vectors_over_pic_boundaries_flag */
- BitstreamRead1Bit(stream, &temp);
- /* max_bytes_per_pic_denom */
- ue_v(stream, &temp);
- /* max_bits_per_mb_denom */
- ue_v(stream, &temp);
- /* log2_max_mv_length_horizontal */
- ue_v(stream, &temp);
- /* log2_max_mv_length_vertical */
- ue_v(stream, &temp);
- /* num_reorder_frames */
- ue_v(stream, &temp);
- /* max_dec_frame_buffering */
- ue_v(stream, &temp);
- }
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status hrd_parameters(AVCDecObject *decvid, AVCDecBitstream *stream, AVCHRDParams *HRDParam)
-{
- OSCL_UNUSED_ARG(decvid);
- uint temp;
- uint cpb_cnt_minus1;
- uint i;
- ue_v(stream, &cpb_cnt_minus1);
- HRDParam->cpb_cnt_minus1 = cpb_cnt_minus1;
- /* bit_rate_scale */
- BitstreamReadBits(stream, 4, &temp);
- /* cpb_size_scale */
- BitstreamReadBits(stream, 4, &temp);
- for (i = 0; i <= cpb_cnt_minus1; i++)
- {
- /* bit_rate_value_minus1[i] */
- ue_v(stream, &temp);
- /* cpb_size_value_minus1[i] */
- ue_v(stream, &temp);
- /* cbr_flag[i] */
- ue_v(stream, &temp);
- }
- /* initial_cpb_removal_delay_length_minus1 */
- BitstreamReadBits(stream, 5, &temp);
- /* cpb_removal_delay_length_minus1 */
- BitstreamReadBits(stream, 5, &temp);
- HRDParam->cpb_removal_delay_length_minus1 = temp;
- /* dpb_output_delay_length_minus1 */
- BitstreamReadBits(stream, 5, &temp);
- HRDParam->dpb_output_delay_length_minus1 = temp;
- /* time_offset_length */
- BitstreamReadBits(stream, 5, &temp);
- HRDParam->time_offset_length = temp;
- return AVCDEC_SUCCESS;
-}
-
-
-/** see subclause 7.4.2.2 */
-AVCDec_Status DecodePPS(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream)
-{
- AVCPicParamSet *picParam;
- AVCDec_Status status;
- int i, iGroup, numBits;
- int PicWidthInMbs, PicHeightInMapUnits, PicSizeInMapUnits;
- uint pic_parameter_set_id, seq_parameter_set_id;
- void *userData = decvid->avcHandle->userData;
- AVCHandle *avcHandle = decvid->avcHandle;
-
- ue_v(stream, &pic_parameter_set_id);
- if (pic_parameter_set_id > 255)
- {
- return AVCDEC_FAIL;
- }
-
- ue_v(stream, &seq_parameter_set_id);
-
- if (seq_parameter_set_id > 31)
- {
- return AVCDEC_FAIL;
- }
-
- /* 2.1 if picParams[pic_param_set_id] is NULL, allocate it. */
- if (decvid->picParams[pic_parameter_set_id] == NULL)
- {
- decvid->picParams[pic_parameter_set_id] =
- (AVCPicParamSet*)avcHandle->CBAVC_Malloc(userData, sizeof(AVCPicParamSet), DEFAULT_ATTR);
- if (decvid->picParams[pic_parameter_set_id] == NULL)
- {
- return AVCDEC_MEMORY_FAIL;
- }
-
- decvid->picParams[pic_parameter_set_id]->slice_group_id = NULL;
- }
-
- video->currPicParams = picParam = decvid->picParams[pic_parameter_set_id];
- picParam->seq_parameter_set_id = seq_parameter_set_id;
- picParam->pic_parameter_set_id = pic_parameter_set_id;
-
- BitstreamRead1Bit(stream, (uint*)&(picParam->entropy_coding_mode_flag));
- if (picParam->entropy_coding_mode_flag)
- {
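-        /* CABAC is not supported by this decoder; only CAVLC (entropy_coding_mode_flag == 0)
-           is accepted. */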
- status = AVCDEC_FAIL;
- goto clean_up;
- }
- BitstreamRead1Bit(stream, (uint*)&(picParam->pic_order_present_flag));
- ue_v(stream, &(picParam->num_slice_groups_minus1));
-
- if (picParam->num_slice_groups_minus1 > MAX_NUM_SLICE_GROUP - 1)
- {
- status = AVCDEC_FAIL;
- goto clean_up;
- }
-
- picParam->slice_group_change_rate_minus1 = 0; /* default value */
- if (picParam->num_slice_groups_minus1 > 0)
- {
- ue_v(stream, &(picParam->slice_group_map_type));
- if (picParam->slice_group_map_type == 0)
- {
- for (iGroup = 0; iGroup <= (int)picParam->num_slice_groups_minus1; iGroup++)
- {
- ue_v(stream, &(picParam->run_length_minus1[iGroup]));
- }
- }
- else if (picParam->slice_group_map_type == 2)
- { // MC_CHECK <= or <
- for (iGroup = 0; iGroup < (int)picParam->num_slice_groups_minus1; iGroup++)
- {
- ue_v(stream, &(picParam->top_left[iGroup]));
- ue_v(stream, &(picParam->bottom_right[iGroup]));
- }
- }
- else if (picParam->slice_group_map_type == 3 ||
- picParam->slice_group_map_type == 4 ||
- picParam->slice_group_map_type == 5)
- {
- BitstreamRead1Bit(stream, (uint*)&(picParam->slice_group_change_direction_flag));
- ue_v(stream, &(picParam->slice_group_change_rate_minus1));
- }
- else if (picParam->slice_group_map_type == 6)
- {
- ue_v(stream, &(picParam->pic_size_in_map_units_minus1));
-
- numBits = 0;/* ceil(log2(num_slice_groups_minus1+1)) bits */
- i = picParam->num_slice_groups_minus1;
- while (i > 0)
- {
- numBits++;
- i >>= 1;
- }
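-            /* The loop above counts the bits in num_slice_groups_minus1, which equals
-               Ceil(Log2(num_slice_groups_minus1 + 1)), the length of each slice_group_id entry. */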
-
- i = picParam->seq_parameter_set_id;
- if (decvid->seqParams[i] == NULL)
- {
- status = AVCDEC_FAIL;
- goto clean_up;
- }
-
-
- PicWidthInMbs = decvid->seqParams[i]->pic_width_in_mbs_minus1 + 1;
- PicHeightInMapUnits = decvid->seqParams[i]->pic_height_in_map_units_minus1 + 1 ;
- PicSizeInMapUnits = PicWidthInMbs * PicHeightInMapUnits ;
-
- /* information has to be consistent with the seq_param */
- if ((int)picParam->pic_size_in_map_units_minus1 != PicSizeInMapUnits - 1)
- {
- status = AVCDEC_FAIL;
- goto clean_up;
- }
-
- if (picParam->slice_group_id)
- {
- avcHandle->CBAVC_Free(userData, (int)picParam->slice_group_id);
- }
- picParam->slice_group_id = (uint*)avcHandle->CBAVC_Malloc(userData, sizeof(uint) * PicSizeInMapUnits, DEFAULT_ATTR);
- if (picParam->slice_group_id == NULL)
- {
- status = AVCDEC_MEMORY_FAIL;
- goto clean_up;
- }
-
- for (i = 0; i < PicSizeInMapUnits; i++)
- {
- BitstreamReadBits(stream, numBits, &(picParam->slice_group_id[i]));
- }
- }
-
- }
-
- ue_v(stream, &(picParam->num_ref_idx_l0_active_minus1));
- if (picParam->num_ref_idx_l0_active_minus1 > 31)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- ue_v(stream, &(picParam->num_ref_idx_l1_active_minus1));
- if (picParam->num_ref_idx_l1_active_minus1 > 31)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- BitstreamRead1Bit(stream, (uint*)&(picParam->weighted_pred_flag));
- BitstreamReadBits(stream, 2, &(picParam->weighted_bipred_idc));
- if (picParam->weighted_bipred_idc > 2)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- se_v(stream, &(picParam->pic_init_qp_minus26));
- if (picParam->pic_init_qp_minus26 < -26 || picParam->pic_init_qp_minus26 > 25)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- se_v(stream, &(picParam->pic_init_qs_minus26));
- if (picParam->pic_init_qs_minus26 < -26 || picParam->pic_init_qs_minus26 > 25)
- {
- status = AVCDEC_FAIL; /* out of range */
- goto clean_up;
- }
-
- se_v(stream, &(picParam->chroma_qp_index_offset));
- if (picParam->chroma_qp_index_offset < -12 || picParam->chroma_qp_index_offset > 12)
- {
-        status = AVCDEC_FAIL; /* out of range */
-        goto clean_up;
- }
-
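-    /* The last three PPS flags are read with a single 3-bit read; pic_parameter_set_id is
-       reused here only as a scratch variable and is unpacked into the three flags below. */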
- BitstreamReadBits(stream, 3, &pic_parameter_set_id);
- picParam->deblocking_filter_control_present_flag = pic_parameter_set_id >> 2;
- picParam->constrained_intra_pred_flag = (pic_parameter_set_id >> 1) & 1;
- picParam->redundant_pic_cnt_present_flag = pic_parameter_set_id & 1;
-
- return AVCDEC_SUCCESS;
-clean_up:
- if (decvid->picParams[pic_parameter_set_id])
- {
- if (picParam->slice_group_id)
- {
- avcHandle->CBAVC_Free(userData, (int)picParam->slice_group_id);
- }
- decvid->picParams[pic_parameter_set_id]->slice_group_id = NULL;
- avcHandle->CBAVC_Free(userData, (int)decvid->picParams[pic_parameter_set_id]);
- decvid->picParams[pic_parameter_set_id] = NULL;
- return status;
- }
- return AVCDEC_SUCCESS;
-}
-
-
-/* FirstPartOfSliceHeader();
- RestOfSliceHeader() */
-/** see subclause 7.4.3 */
-AVCDec_Status DecodeSliceHeader(AVCDecObject *decvid, AVCCommonObj *video, AVCDecBitstream *stream)
-{
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- AVCPicParamSet *currPPS;
- AVCSeqParamSet *currSPS;
- AVCDec_Status status;
- uint idr_pic_id;
- int slice_type, temp, i;
-
- ue_v(stream, &(sliceHdr->first_mb_in_slice));
- ue_v(stream, (uint*)&slice_type);
-
- if (sliceHdr->first_mb_in_slice != 0)
- {
- if ((int)sliceHdr->slice_type >= 5 && slice_type != (int)sliceHdr->slice_type - 5)
- {
- return AVCDEC_FAIL; /* slice type doesn't follow the first slice in the picture */
- }
- }
- sliceHdr->slice_type = (AVCSliceType) slice_type;
- if (slice_type > 4)
- {
- slice_type -= 5;
- }
-
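-    /* Only P (slice_type 0) and I (slice_type 2) slices are supported; B, SP and SI slices
-       are rejected. */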
- if (slice_type == 1 || slice_type > 2)
- {
- return AVCDEC_FAIL;
- }
-
- video->slice_type = (AVCSliceType) slice_type;
-
- ue_v(stream, &(sliceHdr->pic_parameter_set_id));
- /* end FirstPartSliceHeader() */
- /* begin RestOfSliceHeader() */
- /* after getting pic_parameter_set_id, we have to load corresponding SPS and PPS */
- if (sliceHdr->pic_parameter_set_id > 255)
- {
- return AVCDEC_FAIL;
- }
-
- if (decvid->picParams[sliceHdr->pic_parameter_set_id] == NULL)
- return AVCDEC_FAIL; /* PPS doesn't exist */
-
- currPPS = video->currPicParams = decvid->picParams[sliceHdr->pic_parameter_set_id];
-
- if (decvid->seqParams[currPPS->seq_parameter_set_id] == NULL)
- return AVCDEC_FAIL; /* SPS doesn't exist */
-
- currSPS = video->currSeqParams = decvid->seqParams[currPPS->seq_parameter_set_id];
-
- if (currPPS->seq_parameter_set_id != video->seq_parameter_set_id)
- {
- video->seq_parameter_set_id = currPPS->seq_parameter_set_id;
- status = (AVCDec_Status)AVCConfigureSequence(decvid->avcHandle, video, false);
- if (status != AVCDEC_SUCCESS)
- return status;
- video->level_idc = currSPS->level_idc;
- }
-
- /* derived variables from SPS */
- video->MaxFrameNum = 1 << (currSPS->log2_max_frame_num_minus4 + 4);
- // MC_OPTIMIZE
- video->PicWidthInMbs = currSPS->pic_width_in_mbs_minus1 + 1;
- video->PicWidthInSamplesL = video->PicWidthInMbs * 16 ;
- video->PicWidthInSamplesC = video->PicWidthInMbs * 8 ;
- video->PicHeightInMapUnits = currSPS->pic_height_in_map_units_minus1 + 1 ;
- video->PicSizeInMapUnits = video->PicWidthInMbs * video->PicHeightInMapUnits ;
- video->FrameHeightInMbs = (2 - currSPS->frame_mbs_only_flag) * video->PicHeightInMapUnits ;
-
- /* derived from PPS */
- video->SliceGroupChangeRate = currPPS->slice_group_change_rate_minus1 + 1;
-
- /* then we can continue decoding slice header */
-
- BitstreamReadBits(stream, currSPS->log2_max_frame_num_minus4 + 4, &(sliceHdr->frame_num));
-
- if (video->currFS == NULL && sliceHdr->frame_num != 0)
- {
- video->prevFrameNum = video->PrevRefFrameNum = sliceHdr->frame_num - 1;
- }
-
- if (!currSPS->frame_mbs_only_flag)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->field_pic_flag));
- if (sliceHdr->field_pic_flag)
- {
- return AVCDEC_FAIL;
- }
- }
-
- /* derived variables from slice header*/
- video->PicHeightInMbs = video->FrameHeightInMbs;
- video->PicHeightInSamplesL = video->PicHeightInMbs * 16;
- video->PicHeightInSamplesC = video->PicHeightInMbs * 8;
- video->PicSizeInMbs = video->PicWidthInMbs * video->PicHeightInMbs;
-
- if (sliceHdr->first_mb_in_slice >= video->PicSizeInMbs)
- {
- return AVCDEC_FAIL;
- }
- video->MaxPicNum = video->MaxFrameNum;
- video->CurrPicNum = sliceHdr->frame_num;
-
-
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- if (sliceHdr->frame_num != 0)
- {
- return AVCDEC_FAIL;
- }
- ue_v(stream, &idr_pic_id);
- }
-
- sliceHdr->delta_pic_order_cnt_bottom = 0; /* default value */
- sliceHdr->delta_pic_order_cnt[0] = 0; /* default value */
- sliceHdr->delta_pic_order_cnt[1] = 0; /* default value */
- if (currSPS->pic_order_cnt_type == 0)
- {
- BitstreamReadBits(stream, currSPS->log2_max_pic_order_cnt_lsb_minus4 + 4,
- &(sliceHdr->pic_order_cnt_lsb));
- video->MaxPicOrderCntLsb = 1 << (currSPS->log2_max_pic_order_cnt_lsb_minus4 + 4);
- if (sliceHdr->pic_order_cnt_lsb > video->MaxPicOrderCntLsb - 1)
- return AVCDEC_FAIL; /* out of range */
-
- if (currPPS->pic_order_present_flag)
- {
- se_v32bit(stream, &(sliceHdr->delta_pic_order_cnt_bottom));
- }
- }
- if (currSPS->pic_order_cnt_type == 1 && !currSPS->delta_pic_order_always_zero_flag)
- {
- se_v32bit(stream, &(sliceHdr->delta_pic_order_cnt[0]));
- if (currPPS->pic_order_present_flag)
- {
- se_v32bit(stream, &(sliceHdr->delta_pic_order_cnt[1]));
- }
- }
-
- sliceHdr->redundant_pic_cnt = 0; /* default value */
- if (currPPS->redundant_pic_cnt_present_flag)
- {
- // MC_CHECK
- ue_v(stream, &(sliceHdr->redundant_pic_cnt));
- if (sliceHdr->redundant_pic_cnt > 127) /* out of range */
- return AVCDEC_FAIL;
-
- if (sliceHdr->redundant_pic_cnt > 0) /* redundant picture */
- return AVCDEC_FAIL; /* not supported */
- }
- sliceHdr->num_ref_idx_l0_active_minus1 = currPPS->num_ref_idx_l0_active_minus1;
- sliceHdr->num_ref_idx_l1_active_minus1 = currPPS->num_ref_idx_l1_active_minus1;
-
- if (slice_type == AVC_P_SLICE)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->num_ref_idx_active_override_flag));
- if (sliceHdr->num_ref_idx_active_override_flag)
- {
- ue_v(stream, &(sliceHdr->num_ref_idx_l0_active_minus1));
- }
- else /* the following condition is not allowed if the flag is zero */
- {
- if ((slice_type == AVC_P_SLICE) && currPPS->num_ref_idx_l0_active_minus1 > 15)
- {
- return AVCDEC_FAIL; /* not allowed */
- }
- }
- }
-
-
- if (sliceHdr->num_ref_idx_l0_active_minus1 > 15 ||
- sliceHdr->num_ref_idx_l1_active_minus1 > 15)
- {
- return AVCDEC_FAIL; /* not allowed */
- }
- /* if MbaffFrameFlag =1,
- max value of index is num_ref_idx_l0_active_minus1 for frame MBs and
- 2*sliceHdr->num_ref_idx_l0_active_minus1 + 1 for field MBs */
-
- /* ref_pic_list_reordering() */
- status = ref_pic_list_reordering(video, stream, sliceHdr, slice_type);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-
-
- if (video->nal_ref_idc != 0)
- {
- dec_ref_pic_marking(video, stream, sliceHdr);
- }
- se_v(stream, &(sliceHdr->slice_qp_delta));
-
- video->QPy = 26 + currPPS->pic_init_qp_minus26 + sliceHdr->slice_qp_delta;
- if (video->QPy > 51 || video->QPy < 0)
- {
- video->QPy = AVC_CLIP3(0, 51, video->QPy);
-// return AVCDEC_FAIL;
- }
- video->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->QPy + video->currPicParams->chroma_qp_index_offset)];
-
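-    /* (x * 43) >> 8 approximates x / 6 and is exact for x in the QP range 0..51,
-       avoiding an integer division. */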
- video->QPy_div_6 = (video->QPy * 43) >> 8;
- video->QPy_mod_6 = video->QPy - 6 * video->QPy_div_6;
-
- video->QPc_div_6 = (video->QPc * 43) >> 8;
- video->QPc_mod_6 = video->QPc - 6 * video->QPc_div_6;
-
- sliceHdr->slice_alpha_c0_offset_div2 = 0;
- sliceHdr->slice_beta_offset_div_2 = 0;
- sliceHdr->disable_deblocking_filter_idc = 0;
- video->FilterOffsetA = video->FilterOffsetB = 0;
-
- if (currPPS->deblocking_filter_control_present_flag)
- {
- ue_v(stream, &(sliceHdr->disable_deblocking_filter_idc));
- if (sliceHdr->disable_deblocking_filter_idc > 2)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- if (sliceHdr->disable_deblocking_filter_idc != 1)
- {
- se_v(stream, &(sliceHdr->slice_alpha_c0_offset_div2));
- if (sliceHdr->slice_alpha_c0_offset_div2 < -6 ||
- sliceHdr->slice_alpha_c0_offset_div2 > 6)
- {
- return AVCDEC_FAIL;
- }
- video->FilterOffsetA = sliceHdr->slice_alpha_c0_offset_div2 << 1;
-
- se_v(stream, &(sliceHdr->slice_beta_offset_div_2));
- if (sliceHdr->slice_beta_offset_div_2 < -6 ||
- sliceHdr->slice_beta_offset_div_2 > 6)
- {
- return AVCDEC_FAIL;
- }
- video->FilterOffsetB = sliceHdr->slice_beta_offset_div_2 << 1;
- }
- }
-
- if (currPPS->num_slice_groups_minus1 > 0 && currPPS->slice_group_map_type >= 3
- && currPPS->slice_group_map_type <= 5)
- {
- /* Ceil(Log2(PicSizeInMapUnits/(float)SliceGroupChangeRate + 1)) */
- temp = video->PicSizeInMapUnits / video->SliceGroupChangeRate;
- if (video->PicSizeInMapUnits % video->SliceGroupChangeRate)
- {
- temp++;
- }
- i = 0;
- temp++;
- while (temp)
- {
- temp >>= 1;
- i++;
- }
-
- BitstreamReadBits(stream, i, &(sliceHdr->slice_group_change_cycle));
- video->MapUnitsInSliceGroup0 =
- AVC_MIN(sliceHdr->slice_group_change_cycle * video->SliceGroupChangeRate, video->PicSizeInMapUnits);
- }
-
- return AVCDEC_SUCCESS;
-}
-
-
-AVCDec_Status fill_frame_num_gap(AVCHandle *avcHandle, AVCCommonObj *video)
-{
- AVCDec_Status status;
- int CurrFrameNum;
- int UnusedShortTermFrameNum;
- int tmp1 = video->sliceHdr->delta_pic_order_cnt[0];
- int tmp2 = video->sliceHdr->delta_pic_order_cnt[1];
- int tmp3 = video->CurrPicNum;
- int tmp4 = video->sliceHdr->adaptive_ref_pic_marking_mode_flag;
- UnusedShortTermFrameNum = (video->prevFrameNum + 1) % video->MaxFrameNum;
- CurrFrameNum = video->sliceHdr->frame_num;
-
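-    /* For every skipped frame_num value, create a placeholder short-term reference frame
-       and store it in the DPB; the slice header values saved above are restored afterwards. */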
- video->sliceHdr->delta_pic_order_cnt[0] = 0;
- video->sliceHdr->delta_pic_order_cnt[1] = 0;
- while (CurrFrameNum != UnusedShortTermFrameNum)
- {
- video->CurrPicNum = UnusedShortTermFrameNum;
- video->sliceHdr->frame_num = UnusedShortTermFrameNum;
-
- status = (AVCDec_Status)DPBInitBuffer(avcHandle, video);
- if (status != AVCDEC_SUCCESS) /* no buffer available */
- {
- return status;
- }
- DecodePOC(video);
- DPBInitPic(video, UnusedShortTermFrameNum);
-
-
- video->currFS->PicOrderCnt = video->PicOrderCnt;
- video->currFS->FrameNum = video->sliceHdr->frame_num;
-
- /* initialize everything to zero */
- video->currFS->IsOutputted = 0x01;
- video->currFS->IsReference = 3;
- video->currFS->IsLongTerm = 0;
- video->currFS->frame.isReference = TRUE;
- video->currFS->frame.isLongTerm = FALSE;
-
- video->sliceHdr->adaptive_ref_pic_marking_mode_flag = 0;
-
- status = (AVCDec_Status)StorePictureInDPB(avcHandle, video); // MC_CHECK check the return status
- if (status != AVCDEC_SUCCESS)
- {
- return AVCDEC_FAIL;
- }
- video->prevFrameNum = UnusedShortTermFrameNum;
- UnusedShortTermFrameNum = (UnusedShortTermFrameNum + 1) % video->MaxFrameNum;
- }
- video->sliceHdr->frame_num = CurrFrameNum;
- video->CurrPicNum = tmp3;
- video->sliceHdr->delta_pic_order_cnt[0] = tmp1;
- video->sliceHdr->delta_pic_order_cnt[1] = tmp2;
- video->sliceHdr->adaptive_ref_pic_marking_mode_flag = tmp4;
- return AVCDEC_SUCCESS;
-}
-
-/** see subclause 7.4.3.1 */
-AVCDec_Status ref_pic_list_reordering(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr, int slice_type)
-{
- int i;
-
- if (slice_type != AVC_I_SLICE)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->ref_pic_list_reordering_flag_l0));
- if (sliceHdr->ref_pic_list_reordering_flag_l0)
- {
- i = 0;
- do
- {
- ue_v(stream, &(sliceHdr->reordering_of_pic_nums_idc_l0[i]));
- if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 0 ||
- sliceHdr->reordering_of_pic_nums_idc_l0[i] == 1)
- {
- ue_v(stream, &(sliceHdr->abs_diff_pic_num_minus1_l0[i]));
- if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 0 &&
- sliceHdr->abs_diff_pic_num_minus1_l0[i] > video->MaxPicNum / 2 - 1)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 1 &&
- sliceHdr->abs_diff_pic_num_minus1_l0[i] > video->MaxPicNum / 2 - 2)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- }
- else if (sliceHdr->reordering_of_pic_nums_idc_l0[i] == 2)
- {
- ue_v(stream, &(sliceHdr->long_term_pic_num_l0[i]));
- }
- i++;
- }
- while (sliceHdr->reordering_of_pic_nums_idc_l0[i-1] != 3
- && i <= (int)sliceHdr->num_ref_idx_l0_active_minus1 + 1) ;
- }
- }
- return AVCDEC_SUCCESS;
-}
-
-/** see subclause 7.4.3.3 */
-AVCDec_Status dec_ref_pic_marking(AVCCommonObj *video, AVCDecBitstream *stream, AVCSliceHeader *sliceHdr)
-{
- int i;
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- BitstreamRead1Bit(stream, &(sliceHdr->no_output_of_prior_pics_flag));
- BitstreamRead1Bit(stream, &(sliceHdr->long_term_reference_flag));
- if (sliceHdr->long_term_reference_flag == 0) /* used for short-term */
- {
- video->MaxLongTermFrameIdx = -1; /* no long-term frame indx */
- }
- else /* used for long-term */
- {
- video->MaxLongTermFrameIdx = 0;
- video->LongTermFrameIdx = 0;
- }
- }
- else
- {
- BitstreamRead1Bit(stream, &(sliceHdr->adaptive_ref_pic_marking_mode_flag));
- if (sliceHdr->adaptive_ref_pic_marking_mode_flag)
- {
- i = 0;
- do
- {
- ue_v(stream, &(sliceHdr->memory_management_control_operation[i]));
- if (sliceHdr->memory_management_control_operation[i] == 1 ||
- sliceHdr->memory_management_control_operation[i] == 3)
- {
- ue_v(stream, &(sliceHdr->difference_of_pic_nums_minus1[i]));
- }
- if (sliceHdr->memory_management_control_operation[i] == 2)
- {
- ue_v(stream, &(sliceHdr->long_term_pic_num[i]));
- }
- if (sliceHdr->memory_management_control_operation[i] == 3 ||
- sliceHdr->memory_management_control_operation[i] == 6)
- {
- ue_v(stream, &(sliceHdr->long_term_frame_idx[i]));
- }
- if (sliceHdr->memory_management_control_operation[i] == 4)
- {
- ue_v(stream, &(sliceHdr->max_long_term_frame_idx_plus1[i]));
- }
- i++;
- }
- while (sliceHdr->memory_management_control_operation[i-1] != 0 && i < MAX_DEC_REF_PIC_MARKING);
- if (i >= MAX_DEC_REF_PIC_MARKING)
- {
-                return AVCDEC_FAIL; /* too many marking operations; exceeds MAX_DEC_REF_PIC_MARKING */
- }
- }
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* see subclause 8.2.1 Decoding process for picture order count. */
-AVCDec_Status DecodePOC(AVCCommonObj *video)
-{
- AVCSeqParamSet *currSPS = video->currSeqParams;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- int i;
-
- switch (currSPS->pic_order_cnt_type)
- {
- case 0: /* POC MODE 0 , subclause 8.2.1.1 */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->prevPicOrderCntMsb = 0;
- video->prevPicOrderCntLsb = 0;
- }
-
- /* Calculate the MSBs of current picture */
- if (sliceHdr->pic_order_cnt_lsb < video->prevPicOrderCntLsb &&
- (video->prevPicOrderCntLsb - sliceHdr->pic_order_cnt_lsb) >= (video->MaxPicOrderCntLsb / 2))
- video->PicOrderCntMsb = video->prevPicOrderCntMsb + video->MaxPicOrderCntLsb;
- else if (sliceHdr->pic_order_cnt_lsb > video->prevPicOrderCntLsb &&
- (sliceHdr->pic_order_cnt_lsb - video->prevPicOrderCntLsb) > (video->MaxPicOrderCntLsb / 2))
- video->PicOrderCntMsb = video->prevPicOrderCntMsb - video->MaxPicOrderCntLsb;
- else
- video->PicOrderCntMsb = video->prevPicOrderCntMsb;
-
- /* JVT-I010 page 81 is different from JM7.3 */
-
-
- video->PicOrderCnt = video->TopFieldOrderCnt = video->PicOrderCntMsb + sliceHdr->pic_order_cnt_lsb;
- video->BottomFieldOrderCnt = video->TopFieldOrderCnt + sliceHdr->delta_pic_order_cnt_bottom;
-
- break;
-
-
- case 1: /* POC MODE 1, subclause 8.2.1.2 */
- /* calculate FrameNumOffset */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->prevFrameNumOffset = 0;
- video->FrameNumOffset = 0;
- }
- else if (video->prevFrameNum > sliceHdr->frame_num)
- {
- video->FrameNumOffset = video->prevFrameNumOffset + video->MaxFrameNum;
- }
- else
- {
- video->FrameNumOffset = video->prevFrameNumOffset;
- }
- /* calculate absFrameNum */
- if (currSPS->num_ref_frames_in_pic_order_cnt_cycle)
- {
- video->absFrameNum = video->FrameNumOffset + sliceHdr->frame_num;
- }
- else
- {
- video->absFrameNum = 0;
- }
-
- if (video->absFrameNum > 0 && video->nal_ref_idc == 0)
- {
- video->absFrameNum--;
- }
-
- /* derive picOrderCntCycleCnt and frameNumInPicOrderCntCycle */
- if (video->absFrameNum > 0)
- {
- video->picOrderCntCycleCnt = (video->absFrameNum - 1) / currSPS->num_ref_frames_in_pic_order_cnt_cycle;
- video->frameNumInPicOrderCntCycle = (video->absFrameNum - 1) % currSPS->num_ref_frames_in_pic_order_cnt_cycle;
- }
- /* derive expectedDeltaPerPicOrderCntCycle */
- video->expectedDeltaPerPicOrderCntCycle = 0;
- for (i = 0; i < (int)currSPS->num_ref_frames_in_pic_order_cnt_cycle; i++)
- {
- video->expectedDeltaPerPicOrderCntCycle += currSPS->offset_for_ref_frame[i];
- }
- /* derive expectedPicOrderCnt */
- if (video->absFrameNum)
- {
- video->expectedPicOrderCnt = video->picOrderCntCycleCnt * video->expectedDeltaPerPicOrderCntCycle;
- for (i = 0; i <= video->frameNumInPicOrderCntCycle; i++)
- {
- video->expectedPicOrderCnt += currSPS->offset_for_ref_frame[i];
- }
- }
- else
- {
- video->expectedPicOrderCnt = 0;
- }
-
- if (video->nal_ref_idc == 0)
- {
- video->expectedPicOrderCnt += currSPS->offset_for_non_ref_pic;
- }
- /* derive TopFieldOrderCnt and BottomFieldOrderCnt */
-
- video->TopFieldOrderCnt = video->expectedPicOrderCnt + sliceHdr->delta_pic_order_cnt[0];
- video->BottomFieldOrderCnt = video->TopFieldOrderCnt + currSPS->offset_for_top_to_bottom_field + sliceHdr->delta_pic_order_cnt[1];
-
- video->PicOrderCnt = AVC_MIN(video->TopFieldOrderCnt, video->BottomFieldOrderCnt);
-
-
- break;
-
-
- case 2: /* POC MODE 2, subclause 8.2.1.3 */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->FrameNumOffset = 0;
- }
- else if (video->prevFrameNum > sliceHdr->frame_num)
- {
- video->FrameNumOffset = video->prevFrameNumOffset + video->MaxFrameNum;
- }
- else
- {
- video->FrameNumOffset = video->prevFrameNumOffset;
- }
- /* derive tempPicOrderCnt, we just use PicOrderCnt */
- if (video->nal_unit_type == AVC_NALTYPE_IDR)
- {
- video->PicOrderCnt = 0;
- }
- else if (video->nal_ref_idc == 0)
- {
- video->PicOrderCnt = 2 * (video->FrameNumOffset + sliceHdr->frame_num) - 1;
- }
- else
- {
- video->PicOrderCnt = 2 * (video->FrameNumOffset + sliceHdr->frame_num);
- }
- video->TopFieldOrderCnt = video->BottomFieldOrderCnt = video->PicOrderCnt;
- break;
- default:
- return AVCDEC_FAIL;
- }
-
- return AVCDEC_SUCCESS;
-}
-
-
-AVCDec_Status DecodeSEI(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- OSCL_UNUSED_ARG(decvid);
- OSCL_UNUSED_ARG(stream);
- return AVCDEC_SUCCESS;
-}
-
-AVCDec_Status sei_payload(AVCDecObject *decvid, AVCDecBitstream *stream, uint payloadType, uint payloadSize)
-{
- AVCDec_Status status = AVCDEC_SUCCESS;
- uint i;
- switch (payloadType)
- {
- case 0:
- /* buffering period SEI */
- status = buffering_period(decvid, stream);
- break;
- case 1:
- /* picture timing SEI */
- status = pic_timing(decvid, stream);
- break;
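-        /* SEI payload types 2-5 and 8-17 are not interpreted; fall through and skip
-           payloadSize bytes. */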
- case 2:
-
- case 3:
-
- case 4:
-
- case 5:
-
- case 8:
-
- case 9:
-
- case 10:
-
- case 11:
-
- case 12:
-
- case 13:
-
- case 14:
-
- case 15:
-
- case 16:
-
- case 17:
- for (i = 0; i < payloadSize; i++)
- {
- BitstreamFlushBits(stream, 8);
- }
- break;
- case 6:
- /* recovery point SEI */
- status = recovery_point(decvid, stream);
- break;
- case 7:
- /* decoded reference picture marking repetition SEI */
- status = dec_ref_pic_marking_repetition(decvid, stream);
- break;
-
- case 18:
- /* motion-constrained slice group set SEI */
- status = motion_constrained_slice_group_set(decvid, stream);
- break;
- default:
- /* reserved_sei_message */
- for (i = 0; i < payloadSize; i++)
- {
- BitstreamFlushBits(stream, 8);
- }
- break;
- }
- BitstreamByteAlign(stream);
- return status;
-}
-
-AVCDec_Status buffering_period(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCSeqParamSet *currSPS;
- uint seq_parameter_set_id;
- uint temp;
- uint i;
- ue_v(stream, &seq_parameter_set_id);
- if (seq_parameter_set_id > 31)
- {
- return AVCDEC_FAIL;
- }
-
-// decvid->common->seq_parameter_set_id = seq_parameter_set_id;
-
- currSPS = decvid->seqParams[seq_parameter_set_id];
- if (currSPS->vui_parameters.nal_hrd_parameters_present_flag)
- {
- for (i = 0; i <= currSPS->vui_parameters.nal_hrd_parameters.cpb_cnt_minus1; i++)
- {
- /* initial_cpb_removal_delay[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
-            /* initial_cpb_removal_delay_offset[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- }
- }
-
- if (currSPS->vui_parameters.vcl_hrd_parameters_present_flag)
- {
- for (i = 0; i <= currSPS->vui_parameters.vcl_hrd_parameters.cpb_cnt_minus1; i++)
- {
- /* initial_cpb_removal_delay[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
-            /* initial_cpb_removal_delay_offset[i] */
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- }
- }
-
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status pic_timing(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCSeqParamSet *currSPS;
- uint temp, NumClockTs = 0, time_offset_length = 24, full_timestamp_flag;
- uint i;
-
- currSPS = decvid->seqParams[decvid->common->seq_parameter_set_id];
-
- if (currSPS->vui_parameters.nal_hrd_parameters_present_flag)
- {
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- BitstreamReadBits(stream, currSPS->vui_parameters.nal_hrd_parameters.dpb_output_delay_length_minus1 + 1, &temp);
- time_offset_length = currSPS->vui_parameters.nal_hrd_parameters.time_offset_length;
- }
- else if (currSPS->vui_parameters.vcl_hrd_parameters_present_flag)
- {
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.cpb_removal_delay_length_minus1 + 1, &temp);
- BitstreamReadBits(stream, currSPS->vui_parameters.vcl_hrd_parameters.dpb_output_delay_length_minus1 + 1, &temp);
- time_offset_length = currSPS->vui_parameters.vcl_hrd_parameters.time_offset_length;
- }
-
- if (currSPS->vui_parameters.pic_struct_present_flag)
- {
- /* pic_struct */
- BitstreamReadBits(stream, 4, &temp);
-
- switch (temp)
- {
- case 0:
- case 1:
- case 2:
- NumClockTs = 1;
- break;
- case 3:
- case 4:
- case 7:
- NumClockTs = 2;
- break;
- case 5:
- case 6:
- case 8:
- NumClockTs = 3;
- break;
- default:
- NumClockTs = 0;
- break;
- }
-
- for (i = 0; i < NumClockTs; i++)
- {
- /* clock_timestamp_flag[i] */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* ct_type */
- BitstreamReadBits(stream, 2, &temp);
- /* nuit_field_based_flag */
- BitstreamRead1Bit(stream, &temp);
- /* counting_type */
- BitstreamReadBits(stream, 5, &temp);
- /* full_timestamp_flag */
- BitstreamRead1Bit(stream, &temp);
- full_timestamp_flag = temp;
- /* discontinuity_flag */
- BitstreamRead1Bit(stream, &temp);
- /* cnt_dropped_flag */
- BitstreamRead1Bit(stream, &temp);
- /* n_frames */
- BitstreamReadBits(stream, 8, &temp);
-
-
- if (full_timestamp_flag)
- {
- /* seconds_value */
- BitstreamReadBits(stream, 6, &temp);
- /* minutes_value */
- BitstreamReadBits(stream, 6, &temp);
- /* hours_value */
- BitstreamReadBits(stream, 5, &temp);
- }
- else
- {
- /* seconds_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* seconds_value */
- BitstreamReadBits(stream, 6, &temp);
- /* minutes_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* minutes_value */
- BitstreamReadBits(stream, 6, &temp);
-
-                    /* hours_flag */
- BitstreamRead1Bit(stream, &temp);
-
- if (temp)
- {
- /* hours_value */
- BitstreamReadBits(stream, 5, &temp);
- }
-
- }
- }
- }
-
- if (time_offset_length)
- {
- /* time_offset */
- BitstreamReadBits(stream, time_offset_length, &temp);
- }
- else
- {
- /* time_offset */
- temp = 0;
- }
- }
- }
- }
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status recovery_point(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- OSCL_UNUSED_ARG(decvid);
- uint temp;
- /* recover_frame_cnt */
- ue_v(stream, &temp);
- /* exact_match_flag */
- BitstreamRead1Bit(stream, &temp);
- /* broken_link_flag */
- BitstreamRead1Bit(stream, &temp);
-    /* changing_slice_group_idc */
- BitstreamReadBits(stream, 2, &temp);
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status dec_ref_pic_marking_repetition(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- AVCSeqParamSet *currSPS;
- uint temp;
- currSPS = decvid->seqParams[decvid->common->seq_parameter_set_id];
- /* original_idr_flag */
- BitstreamRead1Bit(stream, &temp);
- /* original_frame_num */
- ue_v(stream, &temp);
- if (currSPS->frame_mbs_only_flag == 0)
- {
- /* original_field_pic_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* original_bottom_field_flag */
- BitstreamRead1Bit(stream, &temp);
- }
- }
-
- /* dec_ref_pic_marking(video,stream,sliceHdr); */
-
-
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status motion_constrained_slice_group_set(AVCDecObject *decvid, AVCDecBitstream *stream)
-{
- OSCL_UNUSED_ARG(decvid);
- uint temp, i, numBits;
- /* num_slice_groups_in_set_minus1 */
- ue_v(stream, &temp);
-
- numBits = 0;/* ceil(log2(num_slice_groups_minus1+1)) bits */
- i = temp;
- while (i > 0)
- {
- numBits++;
- i >>= 1;
- }
- for (i = 0; i <= temp; i++)
- {
- /* slice_group_id */
- BitstreamReadBits(stream, numBits, &temp);
- }
- /* exact_sample_value_match_flag */
- BitstreamRead1Bit(stream, &temp);
- /* pan_scan_rect_flag */
- BitstreamRead1Bit(stream, &temp);
- if (temp)
- {
- /* pan_scan_rect_id */
- ue_v(stream, &temp);
- }
-
- return AVCDEC_SUCCESS;
-}
-
diff --git a/media/libstagefright/codecs/avc/dec/src/itrans.cpp b/media/libstagefright/codecs/avc/dec/src/itrans.cpp
deleted file mode 100644
index 02c550d..0000000
--- a/media/libstagefright/codecs/avc/dec/src/itrans.cpp
+++ /dev/null
@@ -1,307 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avclib_common.h"
-
-/* input are in the first 16 elements of block,
- output must be in the location specified in Figure 8-6. */
-/* subclause 8.5.6 */
-void Intra16DCTrans(int16 *block, int Qq, int Rq)
-{
- int m0, m1, m2, m3;
- int j, offset;
- int16 *inout;
- int scale = dequant_coefres[Rq][0];
-
- inout = block;
- for (j = 0; j < 4; j++)
- {
- m0 = inout[0] + inout[4];
- m1 = inout[0] - inout[4];
- m2 = inout[8] + inout[12];
- m3 = inout[8] - inout[12];
-
-
- inout[0] = m0 + m2;
- inout[4] = m0 - m2;
- inout[8] = m1 - m3;
- inout[12] = m1 + m3;
- inout += 64;
- }
-
- inout = block;
-
- if (Qq >= 2) /* this way should be faster than JM */
- { /* they use (((m4*scale)<<(QPy/6))+2)>>2 for both cases. */
- Qq -= 2;
- for (j = 0; j < 4; j++)
- {
- m0 = inout[0] + inout[64];
- m1 = inout[0] - inout[64];
- m2 = inout[128] + inout[192];
- m3 = inout[128] - inout[192];
-
- inout[0] = ((m0 + m2) * scale) << Qq;
- inout[64] = ((m0 - m2) * scale) << Qq;
- inout[128] = ((m1 - m3) * scale) << Qq;
- inout[192] = ((m1 + m3) * scale) << Qq;
- inout += 4;
- }
- }
- else
- {
- Qq = 2 - Qq;
- offset = 1 << (Qq - 1);
-
- for (j = 0; j < 4; j++)
- {
- m0 = inout[0] + inout[64];
- m1 = inout[0] - inout[64];
- m2 = inout[128] + inout[192];
- m3 = inout[128] - inout[192];
-
- inout[0] = (((m0 + m2) * scale + offset) >> Qq);
- inout[64] = (((m0 - m2) * scale + offset) >> Qq);
- inout[128] = (((m1 - m3) * scale + offset) >> Qq);
- inout[192] = (((m1 + m3) * scale + offset) >> Qq);
- inout += 4;
- }
- }
-
- return ;
-}
-
-/* see subclause 8.5.8 */
-void itrans(int16 *block, uint8 *pred, uint8 *cur, int width)
-{
-    int e0, e1, e2, e3; /* note: at every step of the calculation these values */
-    /* shall never exceed the 16-bit signed range, but we don't check */
-    int i;              /* in order to save cycles. */
- int16 *inout;
-
- inout = block;
-
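-    /* First pass: 1-D inverse transform along each row (row pitch of 16). The second pass
-       below transforms the columns, rounds with (+32) >> 6, adds the prediction and clips
-       the result to [0, 255]. */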
- for (i = 4; i > 0; i--)
- {
- e0 = inout[0] + inout[2];
- e1 = inout[0] - inout[2];
- e2 = (inout[1] >> 1) - inout[3];
- e3 = inout[1] + (inout[3] >> 1);
-
- inout[0] = e0 + e3;
- inout[1] = e1 + e2;
- inout[2] = e1 - e2;
- inout[3] = e0 - e3;
-
- inout += 16;
- }
-
- for (i = 4; i > 0; i--)
- {
- e0 = block[0] + block[32];
- e1 = block[0] - block[32];
- e2 = (block[16] >> 1) - block[48];
- e3 = block[16] + (block[48] >> 1);
-
- e0 += e3;
- e3 = (e0 - (e3 << 1)); /* e0-e3 */
- e1 += e2;
- e2 = (e1 - (e2 << 1)); /* e1-e2 */
- e0 += 32;
- e1 += 32;
- e2 += 32;
- e3 += 32;
-#ifdef USE_PRED_BLOCK
- e0 = pred[0] + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- e1 = pred[20] + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- e2 = pred[40] + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- e3 = pred[60] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- *cur = e0;
- *(cur += width) = e1;
- *(cur += width) = e2;
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
- pred++;
-#else
- OSCL_UNUSED_ARG(pred);
-
- e0 = *cur + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- *cur = e0;
- e1 = *(cur += width) + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- *cur = e1;
- e2 = *(cur += width) + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- *cur = e2;
- e3 = cur[width] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
-#endif
- block++;
- }
-
- return ;
-}
-
-/* see subclause 8.5.8 */
-void ictrans(int16 *block, uint8 *pred, uint8 *cur, int width)
-{
-    int e0, e1, e2, e3; /* note: at every step of the calculation these values */
-    /* shall never exceed the 16-bit signed range, but we don't check */
-    int i;              /* in order to save cycles. */
- int16 *inout;
-
- inout = block;
-
- for (i = 4; i > 0; i--)
- {
- e0 = inout[0] + inout[2];
- e1 = inout[0] - inout[2];
- e2 = (inout[1] >> 1) - inout[3];
- e3 = inout[1] + (inout[3] >> 1);
-
- inout[0] = e0 + e3;
- inout[1] = e1 + e2;
- inout[2] = e1 - e2;
- inout[3] = e0 - e3;
-
- inout += 16;
- }
-
- for (i = 4; i > 0; i--)
- {
- e0 = block[0] + block[32];
- e1 = block[0] - block[32];
- e2 = (block[16] >> 1) - block[48];
- e3 = block[16] + (block[48] >> 1);
-
- e0 += e3;
- e3 = (e0 - (e3 << 1)); /* e0-e3 */
- e1 += e2;
- e2 = (e1 - (e2 << 1)); /* e1-e2 */
- e0 += 32;
- e1 += 32;
- e2 += 32;
- e3 += 32;
-#ifdef USE_PRED_BLOCK
- e0 = pred[0] + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- e1 = pred[12] + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- e2 = pred[24] + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- e3 = pred[36] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- *cur = e0;
- *(cur += width) = e1;
- *(cur += width) = e2;
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
- pred++;
-#else
- OSCL_UNUSED_ARG(pred);
-
- e0 = *cur + (e0 >> 6);
- if ((uint)e0 > 0xFF) e0 = 0xFF & (~(e0 >> 31)); /* clip */
- *cur = e0;
- e1 = *(cur += width) + (e1 >> 6);
- if ((uint)e1 > 0xFF) e1 = 0xFF & (~(e1 >> 31)); /* clip */
- *cur = e1;
- e2 = *(cur += width) + (e2 >> 6);
- if ((uint)e2 > 0xFF) e2 = 0xFF & (~(e2 >> 31)); /* clip */
- *cur = e2;
- e3 = cur[width] + (e3 >> 6);
- if ((uint)e3 > 0xFF) e3 = 0xFF & (~(e3 >> 31)); /* clip */
- cur[width] = e3;
- cur -= (width << 1);
- cur++;
-#endif
- block++;
- }
-
- return ;
-}
-
-/* see subclause 8.5.7 */
-void ChromaDCTrans(int16 *block, int Qq, int Rq)
-{
- int c00, c01, c10, c11;
- int f0, f1, f2, f3;
- int scale = dequant_coefres[Rq][0];
-
- c00 = block[0] + block[4];
- c01 = block[0] - block[4];
- c10 = block[64] + block[68];
- c11 = block[64] - block[68];
-
- f0 = c00 + c10;
- f1 = c01 + c11;
- f2 = c00 - c10;
- f3 = c01 - c11;
-
- if (Qq >= 1)
- {
- Qq -= 1;
- block[0] = (f0 * scale) << Qq;
- block[4] = (f1 * scale) << Qq;
- block[64] = (f2 * scale) << Qq;
- block[68] = (f3 * scale) << Qq;
- }
- else
- {
- block[0] = (f0 * scale) >> 1;
- block[4] = (f1 * scale) >> 1;
- block[64] = (f2 * scale) >> 1;
- block[68] = (f3 * scale) >> 1;
- }
-
- return ;
-}
-
-
-void copy_block(uint8 *pred, uint8 *cur, int width, int pred_pitch)
-{
- uint32 temp;
-
- temp = *((uint32*)pred);
- pred += pred_pitch;
- *((uint32*)cur) = temp;
- cur += width;
- temp = *((uint32*)pred);
- pred += pred_pitch;
- *((uint32*)cur) = temp;
- cur += width;
- temp = *((uint32*)pred);
- pred += pred_pitch;
- *((uint32*)cur) = temp;
- cur += width;
- temp = *((uint32*)pred);
- *((uint32*)cur) = temp;
-
- return ;
-}
-
-
diff --git a/media/libstagefright/codecs/avc/dec/src/pred_inter.cpp b/media/libstagefright/codecs/avc/dec/src/pred_inter.cpp
deleted file mode 100644
index ba36c37..0000000
--- a/media/libstagefright/codecs/avc/dec/src/pred_inter.cpp
+++ /dev/null
@@ -1,2329 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-
-
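-/* Clip x to [0, 255]: negative values (sign bit set) become 0 via the sign-extended shift,
-   while values above 255 saturate to 0xFF. */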
-#define CLIP_RESULT(x) if((uint)x > 0xFF){ \
- x = 0xFF & (~(x>>31));}
-
-/* (blkwidth << 2) + (dy << 1) + dx */
-static void (*const ChromaMC_SIMD[8])(uint8 *, int , int , int , uint8 *, int, int , int) =
-{
- &ChromaFullMC_SIMD,
- &ChromaHorizontalMC_SIMD,
- &ChromaVerticalMC_SIMD,
- &ChromaDiagonalMC_SIMD,
- &ChromaFullMC_SIMD,
- &ChromaHorizontalMC2_SIMD,
- &ChromaVerticalMC2_SIMD,
- &ChromaDiagonalMC2_SIMD
-};
-/* Perform motion prediction and compensation, adding the residual if one exists. */
-void InterMBPrediction(AVCCommonObj *video)
-{
- AVCMacroblock *currMB = video->currMB;
- AVCPictureData *currPic = video->currPic;
- int mbPartIdx, subMbPartIdx;
- int ref_idx;
- int offset_MbPart_indx = 0;
- int16 *mv;
- uint32 x_pos, y_pos;
- uint8 *curL, *curCb, *curCr;
- uint8 *ref_l, *ref_Cb, *ref_Cr;
- uint8 *predBlock, *predCb, *predCr;
- int block_x, block_y, offset_x, offset_y, offsetP, offset;
- int x_position = (video->mb_x << 4);
- int y_position = (video->mb_y << 4);
- int MbHeight, MbWidth, mbPartIdx_X, mbPartIdx_Y, offset_indx;
- int picWidth = currPic->pitch;
- int picHeight = currPic->height;
- int16 *dataBlock;
- uint32 cbp4x4;
- uint32 tmp_word;
-
- tmp_word = y_position * picWidth;
- curL = currPic->Sl + tmp_word + x_position;
- offset = (tmp_word >> 2) + (x_position >> 1);
- curCb = currPic->Scb + offset;
- curCr = currPic->Scr + offset;
-
-#ifdef USE_PRED_BLOCK
- predBlock = video->pred + 84;
- predCb = video->pred + 452;
- predCr = video->pred + 596;
-#else
- predBlock = curL;
- predCb = curCb;
- predCr = curCr;
-#endif
-
- GetMotionVectorPredictor(video, false);
-
- for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
- {
- MbHeight = currMB->SubMbPartHeight[mbPartIdx];
- MbWidth = currMB->SubMbPartWidth[mbPartIdx];
- mbPartIdx_X = ((mbPartIdx + offset_MbPart_indx) & 1);
- mbPartIdx_Y = (mbPartIdx + offset_MbPart_indx) >> 1;
- ref_idx = currMB->ref_idx_L0[(mbPartIdx_Y << 1) + mbPartIdx_X];
- offset_indx = 0;
-
- ref_l = video->RefPicList0[ref_idx]->Sl;
- ref_Cb = video->RefPicList0[ref_idx]->Scb;
- ref_Cr = video->RefPicList0[ref_idx]->Scr;
-
- for (subMbPartIdx = 0; subMbPartIdx < currMB->NumSubMbPart[mbPartIdx]; subMbPartIdx++)
- {
- block_x = (mbPartIdx_X << 1) + ((subMbPartIdx + offset_indx) & 1); // check this
- block_y = (mbPartIdx_Y << 1) + (((subMbPartIdx + offset_indx) >> 1) & 1);
- mv = (int16*)(currMB->mvL0 + block_x + (block_y << 2));
- offset_x = x_position + (block_x << 2);
- offset_y = y_position + (block_y << 2);
- x_pos = (offset_x << 2) + *mv++; /*quarter pel */
- y_pos = (offset_y << 2) + *mv; /*quarter pel */
-
- //offset = offset_y * currPic->width;
- //offsetC = (offset >> 2) + (offset_x >> 1);
-#ifdef USE_PRED_BLOCK
- offsetP = (block_y * 80) + (block_x << 2);
- LumaMotionComp(ref_l, picWidth, picHeight, x_pos, y_pos,
- /*comp_Sl + offset + offset_x,*/
- predBlock + offsetP, 20, MbWidth, MbHeight);
-#else
- offsetP = (block_y << 2) * picWidth + (block_x << 2);
- LumaMotionComp(ref_l, picWidth, picHeight, x_pos, y_pos,
- /*comp_Sl + offset + offset_x,*/
- predBlock + offsetP, picWidth, MbWidth, MbHeight);
-#endif
-
-#ifdef USE_PRED_BLOCK
- offsetP = (block_y * 24) + (block_x << 1);
- ChromaMotionComp(ref_Cb, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scb + offsetC,*/
- predCb + offsetP, 12, MbWidth >> 1, MbHeight >> 1);
- ChromaMotionComp(ref_Cr, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scr + offsetC,*/
- predCr + offsetP, 12, MbWidth >> 1, MbHeight >> 1);
-#else
- offsetP = (block_y * picWidth) + (block_x << 1);
- ChromaMotionComp(ref_Cb, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scb + offsetC,*/
- predCb + offsetP, picWidth >> 1, MbWidth >> 1, MbHeight >> 1);
- ChromaMotionComp(ref_Cr, picWidth >> 1, picHeight >> 1, x_pos, y_pos,
- /*comp_Scr + offsetC,*/
- predCr + offsetP, picWidth >> 1, MbWidth >> 1, MbHeight >> 1);
-#endif
-
- offset_indx = currMB->SubMbPartWidth[mbPartIdx] >> 3;
- }
- offset_MbPart_indx = currMB->MbPartWidth >> 4;
- }
-
- /* used in decoder, used to be if(!encFlag) */
-
- /* transform in raster scan order */
- dataBlock = video->block;
- cbp4x4 = video->cbp4x4;
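-    /* Each bit of cbp4x4 marks whether the corresponding 4x4 block has nonzero coefficients;
-       the inverse transform is applied only to those blocks. */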
- /* luma */
- for (block_y = 4; block_y > 0; block_y--)
- {
- for (block_x = 4; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- itrans(dataBlock, predBlock, predBlock, 20);
- }
-#else
- if (cbp4x4&1)
- {
- itrans(dataBlock, curL, curL, picWidth);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predBlock += 4;
-#else
- curL += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- predBlock += 64;
-#else
- curL += ((picWidth << 2) - 16);
-#endif
- }
-
- /* chroma */
- picWidth = (picWidth >> 1);
- for (block_y = 2; block_y > 0; block_y--)
- {
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCb, predCb, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCb, curCb, picWidth);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCb += 4;
-#else
- curCb += 4;
-#endif
- }
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCr, predCr, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCr, curCr, picWidth);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCr += 4;
-#else
- curCr += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- predCb += 40;
- predCr += 40;
-#else
- curCb += ((picWidth << 2) - 8);
- curCr += ((picWidth << 2) - 8);
-#endif
- }
-
-#ifdef MB_BASED_DEBLOCK
- SaveNeighborForIntraPred(video, offset);
-#endif
-
- return ;
-}
-
-
-/* perform the actual motion compensation here */
-void LumaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos,
- uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight)
-{
- int dx, dy;
- uint8 temp[24][24]; /* for padding, make the size multiple of 4 for packing */
- int temp2[21][21]; /* for intermediate results */
- uint8 *ref2;
-
- dx = x_pos & 3;
- dy = y_pos & 3;
- x_pos = x_pos >> 2; /* round it to full-pel resolution */
- y_pos = y_pos >> 2;
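-    /* dx,dy are the quarter-pel fractional offsets. The letters in the comments below
-       (G, a-c, d/h/n, i-k, f/q, e/g/p/r) follow the usual naming of the H.264 luma
-       fractional sample positions. */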
-
- /* perform actual motion compensation */
- if (dx == 0 && dy == 0)
- { /* fullpel position *//* G */
- if (x_pos >= 0 && x_pos + blkwidth <= picwidth && y_pos >= 0 && y_pos + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos;
- FullPelMC(ref, picwidth, pred, pred_pitch, blkwidth, blkheight);
- }
- else
- {
- CreatePad(ref, picwidth, picheight, x_pos, y_pos, &temp[0][0], blkwidth, blkheight);
- FullPelMC(&temp[0][0], 24, pred, pred_pitch, blkwidth, blkheight);
- }
-
- } /* other positions */
- else if (dy == 0)
- { /* no vertical interpolation *//* a,b,c*/
-
- if (x_pos - 2 >= 0 && x_pos + 3 + blkwidth <= picwidth && y_pos >= 0 && y_pos + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos;
-
- HorzInterp1MC(ref, picwidth, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos, &temp[0][0], blkwidth + 5, blkheight);
-
- HorzInterp1MC(&temp[0][2], 24, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- }
- else if (dx == 0)
- { /*no horizontal interpolation *//* d,h,n */
-
- if (x_pos >= 0 && x_pos + blkwidth <= picwidth && y_pos - 2 >= 0 && y_pos + 3 + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos;
-
- VertInterp1MC(ref, picwidth, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos, y_pos - 2, &temp[0][0], blkwidth, blkheight + 5);
-
- VertInterp1MC(&temp[2][0], 24, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- }
- else if (dy == 2)
- { /* horizontal cross *//* i, j, k */
-
- if (x_pos - 2 >= 0 && x_pos + 3 + blkwidth <= picwidth && y_pos - 2 >= 0 && y_pos + 3 + blkheight <= picheight)
- {
- ref += y_pos * picwidth + x_pos - 2; /* move to the left 2 pixels */
-
- VertInterp2MC(ref, picwidth, &temp2[0][0], 21, blkwidth + 5, blkheight);
-
- HorzInterp2MC(&temp2[0][2], 21, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos - 2, &temp[0][0], blkwidth + 5, blkheight + 5);
-
- VertInterp2MC(&temp[2][0], 24, &temp2[0][0], 21, blkwidth + 5, blkheight);
-
- HorzInterp2MC(&temp2[0][2], 21, pred, pred_pitch, blkwidth, blkheight, dx);
- }
- }
- else if (dx == 2)
- { /* vertical cross */ /* f,q */
-
- if (x_pos - 2 >= 0 && x_pos + 3 + blkwidth <= picwidth && y_pos - 2 >= 0 && y_pos + 3 + blkheight <= picheight)
- {
- ref += (y_pos - 2) * picwidth + x_pos; /* move up 2 lines */
-
- HorzInterp3MC(ref, picwidth, &temp2[0][0], 21, blkwidth, blkheight + 5);
- VertInterp3MC(&temp2[2][0], 21, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos - 2, &temp[0][0], blkwidth + 5, blkheight + 5);
- HorzInterp3MC(&temp[0][2], 24, &temp2[0][0], 21, blkwidth, blkheight + 5);
- VertInterp3MC(&temp2[2][0], 21, pred, pred_pitch, blkwidth, blkheight, dy);
- }
- }
- else
- { /* diagonal *//* e,g,p,r */
-
- if (x_pos - 2 >= 0 && x_pos + 3 + (dx / 2) + blkwidth <= picwidth &&
- y_pos - 2 >= 0 && y_pos + 3 + blkheight + (dy / 2) <= picheight)
- {
- ref2 = ref + (y_pos + (dy / 2)) * picwidth + x_pos;
-
- ref += (y_pos * picwidth) + x_pos + (dx / 2);
-
- DiagonalInterpMC(ref2, ref, picwidth, pred, pred_pitch, blkwidth, blkheight);
- }
- else /* need padding */
- {
- CreatePad(ref, picwidth, picheight, x_pos - 2, y_pos - 2, &temp[0][0], blkwidth + 5 + (dx / 2), blkheight + 5 + (dy / 2));
-
- ref2 = &temp[2 + (dy/2)][2];
-
- ref = &temp[2][2 + (dx/2)];
-
- DiagonalInterpMC(ref2, ref, 24, pred, pred_pitch, blkwidth, blkheight);
- }
- }
-
- return ;
-}
-
-void CreateAlign(uint8 *ref, int picwidth, int y_pos,
- uint8 *out, int blkwidth, int blkheight)
-{
- int i, j;
- int offset, out_offset;
- uint32 prev_pix, result, pix1, pix2, pix4;
-
- out_offset = 24 - blkwidth;
-
- //switch(x_pos&0x3){
- switch (((uint32)ref)&0x3)
- {
- case 1:
- ref += y_pos * picwidth;
- offset = picwidth - blkwidth - 3;
- for (j = 0; j < blkheight; j++)
- {
- pix1 = *ref++;
- pix2 = *((uint16*)ref);
- ref += 2;
- result = (pix2 << 8) | pix1;
-
- for (i = 3; i < blkwidth; i += 4)
- {
- pix4 = *((uint32*)ref);
- ref += 4;
- prev_pix = (pix4 << 24) & 0xFF000000; /* mask out the byte belonging to the previous word */
- result |= prev_pix;
- *((uint32*)out) = result; /* write 4 bytes */
- out += 4;
- result = pix4 >> 8; /* for the next loop */
- }
- ref += offset;
- out += out_offset;
- }
- break;
- case 2:
- ref += y_pos * picwidth;
- offset = picwidth - blkwidth - 2;
- for (j = 0; j < blkheight; j++)
- {
- result = *((uint16*)ref);
- ref += 2;
- for (i = 2; i < blkwidth; i += 4)
- {
- pix4 = *((uint32*)ref);
- ref += 4;
- prev_pix = (pix4 << 16) & 0xFFFF0000; /* mask out the bytes belonging to the previous word */
- result |= prev_pix;
- *((uint32*)out) = result; /* write 4 bytes */
- out += 4;
- result = pix4 >> 16; /* for the next loop */
- }
- ref += offset;
- out += out_offset;
- }
- break;
- case 3:
- ref += y_pos * picwidth;
- offset = picwidth - blkwidth - 1;
- for (j = 0; j < blkheight; j++)
- {
- result = *ref++;
- for (i = 1; i < blkwidth; i += 4)
- {
- pix4 = *((uint32*)ref);
- ref += 4;
- prev_pix = (pix4 << 8) & 0xFFFFFF00; /* mask out the bytes belonging to the previous word */
- result |= prev_pix;
- *((uint32*)out) = result; /* write 4 bytes */
- out += 4;
- result = pix4 >> 24; /* for the next loop */
- }
- ref += offset;
- out += out_offset;
- }
- break;
- }
-}
-
-void CreatePad(uint8 *ref, int picwidth, int picheight, int x_pos, int y_pos,
- uint8 *out, int blkwidth, int blkheight)
-{
- int x_inc0, x_mid;
- int y_inc, y_inc0, y_inc1, y_mid;
- int i, j;
- int offset;
-
- if (x_pos < 0)
- {
- x_inc0 = 0; /* increment for the first part */
- x_mid = ((blkwidth + x_pos > 0) ? -x_pos : blkwidth); /* stopping point */
- x_pos = 0;
- }
- else if (x_pos + blkwidth > picwidth)
- {
- x_inc0 = 1; /* increasing */
- x_mid = ((picwidth > x_pos) ? picwidth - x_pos - 1 : 0); /* clip negative to zero, to be robust against bad encoder input */
- }
- else /* normal case */
- {
- x_inc0 = 1;
- x_mid = blkwidth; /* just one run */
- }
-
-
- /* boundary for y_pos, taking the result from x_pos into account */
- if (y_pos < 0)
- {
- y_inc0 = (x_inc0 ? - x_mid : -blkwidth + x_mid); /* offset depending on x_inc1 and x_inc0 */
- y_inc1 = picwidth + y_inc0;
- y_mid = ((blkheight + y_pos > 0) ? -y_pos : blkheight); /* clip to prevent memory corruption */
- y_pos = 0;
- }
- else if (y_pos + blkheight > picheight)
- {
- y_inc1 = (x_inc0 ? - x_mid : -blkwidth + x_mid); /* saturate */
- y_inc0 = picwidth + y_inc1; /* increasing */
- y_mid = ((picheight > y_pos) ? picheight - 1 - y_pos : 0);
- }
- else /* normal case */
- {
- y_inc1 = (x_inc0 ? - x_mid : -blkwidth + x_mid);
- y_inc0 = picwidth + y_inc1;
- y_mid = blkheight;
- }
-
- /* clip y_pos and x_pos */
- if (y_pos > picheight - 1) y_pos = picheight - 1;
- if (x_pos > picwidth - 1) x_pos = picwidth - 1;
-
- ref += y_pos * picwidth + x_pos;
-
- y_inc = y_inc0; /* start with top half */
-
- offset = 24 - blkwidth; /* to use in offset out */
- blkwidth -= x_mid; /* to use in the loop limit */
-
- if (x_inc0 == 0)
- {
- for (j = 0; j < blkheight; j++)
- {
- if (j == y_mid) /* put a check here to reduce the code size (for unrolling the loop) */
- {
- y_inc = y_inc1; /* switch to lower half */
- }
- for (i = x_mid; i > 0; i--) /* first or third quarter */
- {
- *out++ = *ref;
- }
- for (i = blkwidth; i > 0; i--) /* second or fourth quarter */
- {
- *out++ = *ref++;
- }
- out += offset;
- ref += y_inc;
- }
- }
- else
- {
- for (j = 0; j < blkheight; j++)
- {
- if (j == y_mid) /* put a check here to reduce the code size (for unrolling the loop) */
- {
- y_inc = y_inc1; /* switch to lower half */
- }
- for (i = x_mid; i > 0; i--) /* first or third quarter */
- {
- *out++ = *ref++;
- }
- for (i = blkwidth; i > 0; i--) /* second or fourth quarter */
- {
- *out++ = *ref;
- }
- out += offset;
- ref += y_inc;
- }
- }
-
- return ;
-}
-
-void HorzInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx)
-{
- uint8 *p_ref;
- uint32 *p_cur;
- uint32 tmp, pkres;
- int result, curr_offset, ref_offset;
- int j;
- int32 r0, r1, r2, r3, r4, r5;
- int32 r13, r6;
-
- p_cur = (uint32*)out; /* assume it's word aligned */
- curr_offset = (outpitch - blkwidth) >> 2;
- p_ref = in;
- ref_offset = inpitch - blkwidth;
-
- if (dx&1)
- {
- dx = ((dx >> 1) ? -3 : -4); /* use in 3/4 pel */
- p_ref -= 2;
- r13 = 0;
- for (j = blkheight; j > 0; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- r0 = p_ref[0];
- r1 = p_ref[2];
- r0 |= (r1 << 16); /* 0,c,0,a */
- r1 = p_ref[1];
- r2 = p_ref[3];
- r1 |= (r2 << 16); /* 0,d,0,b */
- while ((uint32)p_ref < tmp)
- {
- r2 = *(p_ref += 4); /* move pointer to e */
- r3 = p_ref[2];
- r2 |= (r3 << 16); /* 0,g,0,e */
- r3 = p_ref[1];
- r4 = p_ref[3];
- r3 |= (r4 << 16); /* 0,h,0,f */
-
- r4 = r0 + r3; /* c+h, a+f */
- r5 = r0 + r1; /* c+d, a+b */
- r6 = r2 + r3; /* g+h, e+f */
- r5 >>= 16;
- r5 |= (r6 << 16); /* e+f, c+d */
- r4 += r5 * 20; /* c+20*e+20*f+h, a+20*c+20*d+f */
- r4 += 0x100010; /* +16, +16 */
- r5 = r1 + r2; /* d+g, b+e */
- r4 -= r5 * 5; /* c-5*d+20*e+20*f-5*g+h, a-5*b+20*c+20*d-5*e+f */
- r4 >>= 5;
- r13 |= r4; /* check clipping */
-
- r5 = p_ref[dx+2];
- r6 = p_ref[dx+4];
- r5 |= (r6 << 16);
- r4 += r5;
- r4 += 0x10001;
- r4 = (r4 >> 1) & 0xFF00FF;
-
- r5 = p_ref[4]; /* i */
- r6 = (r5 << 16);
- r5 = r6 | (r2 >> 16);/* 0,i,0,g */
- r5 += r1; /* d+i, b+g */ /* r5 not free */
- r1 >>= 16;
- r1 |= (r3 << 16); /* 0,f,0,d */ /* r1 has changed */
- r1 += r2; /* f+g, d+e */
- r5 += 20 * r1; /* d+20f+20g+i, b+20d+20e+g */
- r0 >>= 16;
- r0 |= (r2 << 16); /* 0,e,0,c */ /* r0 has changed */
- r0 += r3; /* e+h, c+f */
- r5 += 0x100010; /* 16,16 */
- r5 -= r0 * 5; /* d-5e+20f+20g-5h+i, b-5c+20d+20e-5f+g */
- r5 >>= 5;
- r13 |= r5; /* check clipping */
-
- r0 = p_ref[dx+3];
- r1 = p_ref[dx+5];
- r0 |= (r1 << 16);
- r5 += r0;
- r5 += 0x10001;
- r5 = (r5 >> 1) & 0xFF00FF;
-
- r4 |= (r5 << 8); /* pack them together */
- *p_cur++ = r4;
- r1 = r3;
- r0 = r2;
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
-
- if (r13&0xFF000700) /* need clipping */
- {
- /* move back to the beginning of the line */
- p_ref -= (ref_offset + blkwidth); /* input */
- p_cur -= (outpitch >> 2);
-
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = *p_ref++;
- r1 = *p_ref++;
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- pkres = (result >> 1) ;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- result = (result >> 1);
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- result = (result >> 1);
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dx] + 1);
- result = (result >> 1);
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 5; /* offset back to the middle of filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* move to the next line */
- }
- }
- }
- else
- {
- p_ref -= 2;
- r13 = 0;
- for (j = blkheight; j > 0; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- r0 = p_ref[0];
- r1 = p_ref[2];
- r0 |= (r1 << 16); /* 0,c,0,a */
- r1 = p_ref[1];
- r2 = p_ref[3];
- r1 |= (r2 << 16); /* 0,d,0,b */
- while ((uint32)p_ref < tmp)
- {
- r2 = *(p_ref += 4); /* move pointer to e */
- r3 = p_ref[2];
- r2 |= (r3 << 16); /* 0,g,0,e */
- r3 = p_ref[1];
- r4 = p_ref[3];
- r3 |= (r4 << 16); /* 0,h,0,f */
-
- r4 = r0 + r3; /* c+h, a+f */
- r5 = r0 + r1; /* c+d, a+b */
- r6 = r2 + r3; /* g+h, e+f */
- r5 >>= 16;
- r5 |= (r6 << 16); /* e+f, c+d */
- r4 += r5 * 20; /* c+20*e+20*f+h, a+20*c+20*d+f */
- r4 += 0x100010; /* +16, +16 */
- r5 = r1 + r2; /* d+g, b+e */
- r4 -= r5 * 5; /* c-5*d+20*e+20*f-5*g+h, a-5*b+20*c+20*d-5*e+f */
- r4 >>= 5;
- r13 |= r4; /* check clipping */
- r4 &= 0xFF00FF; /* mask */
-
- r5 = p_ref[4]; /* i */
- r6 = (r5 << 16);
- r5 = r6 | (r2 >> 16);/* 0,i,0,g */
- r5 += r1; /* d+i, b+g */ /* r5 not free */
- r1 >>= 16;
- r1 |= (r3 << 16); /* 0,f,0,d */ /* r1 has changed */
- r1 += r2; /* f+g, d+e */
- r5 += 20 * r1; /* d+20f+20g+i, b+20d+20e+g */
- r0 >>= 16;
- r0 |= (r2 << 16); /* 0,e,0,c */ /* r0 has changed */
- r0 += r3; /* e+h, c+f */
- r5 += 0x100010; /* 16,16 */
- r5 -= r0 * 5; /* d-5e+20f+20g-5h+i, b-5c+20d+20e-5f+g */
- r5 >>= 5;
- r13 |= r5; /* check clipping */
- r5 &= 0xFF00FF; /* mask */
-
- r4 |= (r5 << 8); /* pack them together */
- *p_cur++ = r4;
- r1 = r3;
- r0 = r2;
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
-
- if (r13&0xFF000700) /* need clipping */
- {
- /* move back to the beginning of the line */
- p_ref -= (ref_offset + blkwidth); /* input */
- p_cur -= (outpitch >> 2);
-
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = *p_ref++;
- r1 = *p_ref++;
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 5;
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset;
- }
- }
- }
-
- return ;
-}
-
-void HorzInterp2MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dx)
-{
- int *p_ref;
- uint32 *p_cur;
- uint32 tmp, pkres;
- int result, result2, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = (uint32*)out; /* assume it's word aligned */
- curr_offset = (outpitch - blkwidth) >> 2;
- p_ref = in;
- ref_offset = inpitch - blkwidth;
-
- if (dx&1)
- {
- dx = ((dx >> 1) ? -3 : -4); /* use in 3/4 pel */
-
- for (j = blkheight; j > 0 ; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = p_ref[-2];
- r1 = p_ref[-1];
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- pkres = (result >> 1);
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dx] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 3; /* offset back to the middle of filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* move to the next line */
- }
- }
- else
- {
- for (j = blkheight; j > 0 ; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = p_ref[-2];
- r1 = p_ref[-1];
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- pkres |= (result << 24);
- *p_cur++ = pkres; /* write 4 pixels */
- p_ref -= 3; /* offset back to the middle of filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset; /* move to the next line */
- }
- }
-
- return ;
-}
-
-void HorzInterp3MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight)
-{
- uint8 *p_ref;
- int *p_cur;
- uint32 tmp;
- int result, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = out;
- curr_offset = (outpitch - blkwidth);
- p_ref = in;
- ref_offset = inpitch - blkwidth;
-
- for (j = blkheight; j > 0 ; j--)
- {
- tmp = (uint32)(p_ref + blkwidth);
- for (; (uint32)p_ref < tmp;)
- {
-
- r0 = p_ref[-2];
- r1 = p_ref[-1];
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- *p_cur++ = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- *p_cur++ = result;
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- *p_cur++ = result;
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- *p_cur++ = result;
- p_ref -= 3; /* move back to the middle of the filter */
- }
- p_cur += curr_offset; /* move to the next line */
- p_ref += ref_offset;
- }
-
- return ;
-}
-void VertInterp1MC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy)
-{
- uint8 *p_cur, *p_ref;
- uint32 tmp;
- int result, curr_offset, ref_offset;
- int j, i;
- int32 r0, r1, r2, r3, r4, r5, r6, r7, r8, r13;
- uint8 tmp_in[24][24];
-
- /* not word-aligned */
- if (((uint32)in)&0x3)
- {
- CreateAlign(in, inpitch, -2, &tmp_in[0][0], blkwidth, blkheight + 5);
- in = &tmp_in[2][0];
- inpitch = 24;
- }
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically back up and one pixel to right */
- ref_offset = blkheight * inpitch; /* for limit */
-
- curr_offset += 3;
-
- if (dy&1)
- {
- dy = (dy >> 1) ? 0 : -inpitch;
-
- for (j = 0; j < blkwidth; j += 4, in += 4)
- {
- r13 = 0;
- p_ref = in;
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
- r0 = *((uint32*)(p_ref - (inpitch << 1))); /* load 4 bytes */
- p_ref += inpitch;
- r6 = (r0 >> 8) & 0xFF00FF; /* second and fourth byte */
- r0 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + (inpitch << 1))); /* r1, r7, ref[3] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
-
- r0 += r1;
- r6 += r7;
-
- r2 = *((uint32*)p_ref); /* r2, r8, ref[1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref - inpitch)); /* r1, r7, ref[0] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 += 20 * r1;
- r6 += 20 * r7;
- r0 += 0x100010;
- r6 += 0x100010;
-
- r2 = *((uint32*)(p_ref - (inpitch << 1))); /* r2, r8, ref[-1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + inpitch)); /* r1, r7, ref[2] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 -= 5 * r1;
- r6 -= 5 * r7;
-
- r0 >>= 5;
- r6 >>= 5;
- /* clip */
- r13 |= r6;
- r13 |= r0;
- //CLIPPACK(r6,result)
-
- r1 = *((uint32*)(p_ref + dy));
- r2 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r0 += r1;
- r6 += r2;
- r0 += 0x10001;
- r6 += 0x10001;
- r0 = (r0 >> 1) & 0xFF00FF;
- r6 = (r6 >> 1) & 0xFF00FF;
-
- r0 |= (r6 << 8); /* pack it back */
- *((uint32*)(p_cur += outpitch)) = r0;
- }
- p_cur += curr_offset; /* offset to the next pixel */
- if (r13 & 0xFF000700) /* this column needs clipping */
- {
- p_cur -= 4;
- for (i = 0; i < 4; i++)
- {
- p_ref = in + i;
- p_cur -= outpitch; /* compensate for the first offset */
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- /* 3/4 pel, no need to clip */
- result = (result + p_ref[dy-(inpitch<<1)] + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to the filter center for the next iteration */
- }
- p_cur += (curr_offset - 3);
- }
- }
- }
- }
- else
- {
- for (j = 0; j < blkwidth; j += 4, in += 4)
- {
- r13 = 0;
- p_ref = in;
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
- r0 = *((uint32*)(p_ref - (inpitch << 1))); /* load 4 bytes */
- p_ref += inpitch;
- r6 = (r0 >> 8) & 0xFF00FF; /* second and fourth byte */
- r0 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + (inpitch << 1))); /* r1, r7, ref[3] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
-
- r0 += r1;
- r6 += r7;
-
- r2 = *((uint32*)p_ref); /* r2, r8, ref[1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref - inpitch)); /* r1, r7, ref[0] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 += 20 * r1;
- r6 += 20 * r7;
- r0 += 0x100010;
- r6 += 0x100010;
-
- r2 = *((uint32*)(p_ref - (inpitch << 1))); /* r2, r8, ref[-1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- r1 = *((uint32*)(p_ref + inpitch)); /* r1, r7, ref[2] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 -= 5 * r1;
- r6 -= 5 * r7;
-
- r0 >>= 5;
- r6 >>= 5;
- /* clip */
- r13 |= r6;
- r13 |= r0;
- //CLIPPACK(r6,result)
- r0 &= 0xFF00FF;
- r6 &= 0xFF00FF;
- r0 |= (r6 << 8); /* pack it back */
- *((uint32*)(p_cur += outpitch)) = r0;
- }
- p_cur += curr_offset; /* offset to the next pixel */
- if (r13 & 0xFF000700) /* this column needs clipping */
- {
- p_cur -= 4;
- for (i = 0; i < 4; i++)
- {
- p_ref = in + i;
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to the filter center for the next iteration */
- }
- p_cur += (curr_offset - 3);
- }
- }
- }
- }
-
- return ;
-}
-
-void VertInterp2MC(uint8 *in, int inpitch, int *out, int outpitch,
- int blkwidth, int blkheight)
-{
- int *p_cur;
- uint8 *p_ref;
- uint32 tmp;
- int result, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically back up and one pixel to right */
- ref_offset = blkheight * inpitch; /* for limit */
-
- for (j = 0; j < blkwidth; j++)
- {
- p_cur -= outpitch; /* compensate for the first offset */
- p_ref = in++;
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to the filter center for the next iteration */
- }
- p_cur += curr_offset;
- }
-
- return ;
-}
-
-void VertInterp3MC(int *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight, int dy)
-{
- uint8 *p_cur;
- int *p_ref;
- uint32 tmp;
- int result, result2, curr_offset, ref_offset;
- int j, r0, r1, r2, r3, r4, r5;
-
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically back up and one pixel to right */
- ref_offset = blkheight * inpitch; /* for limit */
-
- if (dy&1)
- {
- dy = (dy >> 1) ? -(inpitch << 1) : -(inpitch << 1) - inpitch;
-
- for (j = 0; j < blkwidth; j++)
- {
- p_cur -= outpitch; /* compensate for the first offset */
- p_ref = in++;
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- result2 = ((p_ref[dy] + 16) >> 5);
- CLIP_RESULT(result2)
- /* 3/4 pel, no need to clip */
- result = (result + result2 + 1);
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to the filter center for the next iteration */
- }
- p_cur += curr_offset;
- }
- }
- else
- {
- for (j = 0; j < blkwidth; j++)
- {
- p_cur -= outpitch; /* compensate for the first offset */
- p_ref = in++;
-
- tmp = (uint32)(p_ref + ref_offset); /* limit */
- while ((uint32)p_ref < tmp)
- { /* loop un-rolled */
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 512) >> 10;
- CLIP_RESULT(result)
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to the filter center for the next iteration */
- }
- p_cur += curr_offset;
- }
- }
-
- return ;
-}
-
-void DiagonalInterpMC(uint8 *in1, uint8 *in2, int inpitch,
- uint8 *out, int outpitch,
- int blkwidth, int blkheight)
-{
- int j, i;
- int result;
- uint8 *p_cur, *p_ref, *p_tmp8;
- int curr_offset, ref_offset;
- uint8 tmp_res[24][24], tmp_in[24][24];
- uint32 *p_tmp;
- uint32 tmp, pkres, tmp_result;
- int32 r0, r1, r2, r3, r4, r5;
- int32 r6, r7, r8, r9, r10, r13;
-
- ref_offset = inpitch - blkwidth;
- p_ref = in1 - 2;
- /* perform horizontal interpolation */
- /* not word-aligned */
-/* It is faster to read 1 byte at a time to avoid calling CreateAlign */
- /* if(((uint32)p_ref)&0x3)
- {
- CreateAlign(p_ref,inpitch,0,&tmp_in[0][0],blkwidth+8,blkheight);
- p_ref = &tmp_in[0][0];
- ref_offset = 24-blkwidth;
- }*/
-
- p_tmp = (uint32*) & (tmp_res[0][0]);
- for (j = blkheight; j > 0; j--)
- {
- r13 = 0;
- tmp = (uint32)(p_ref + blkwidth);
-
- //r0 = *((uint32*)p_ref); /* d,c,b,a */
- //r1 = (r0>>8)&0xFF00FF; /* 0,d,0,b */
- //r0 &= 0xFF00FF; /* 0,c,0,a */
- /* It is faster to read 1 byte at a time, */
- r0 = p_ref[0];
- r1 = p_ref[2];
- r0 |= (r1 << 16); /* 0,c,0,a */
- r1 = p_ref[1];
- r2 = p_ref[3];
- r1 |= (r2 << 16); /* 0,d,0,b */
-
- while ((uint32)p_ref < tmp)
- {
- //r2 = *((uint32*)(p_ref+=4));/* h,g,f,e */
- //r3 = (r2>>8)&0xFF00FF; /* 0,h,0,f */
- //r2 &= 0xFF00FF; /* 0,g,0,e */
- /* It is faster to read 1 byte at a time, */
- r2 = *(p_ref += 4);
- r3 = p_ref[2];
- r2 |= (r3 << 16); /* 0,g,0,e */
- r3 = p_ref[1];
- r4 = p_ref[3];
- r3 |= (r4 << 16); /* 0,h,0,f */
-
- r4 = r0 + r3; /* c+h, a+f */
- r5 = r0 + r1; /* c+d, a+b */
- r6 = r2 + r3; /* g+h, e+f */
- r5 >>= 16;
- r5 |= (r6 << 16); /* e+f, c+d */
- r4 += r5 * 20; /* c+20*e+20*f+h, a+20*c+20*d+f */
- r4 += 0x100010; /* +16, +16 */
- r5 = r1 + r2; /* d+g, b+e */
- r4 -= r5 * 5; /* c-5*d+20*e+20*f-5*g+h, a-5*b+20*c+20*d-5*e+f */
- r4 >>= 5;
- r13 |= r4; /* check clipping */
- r4 &= 0xFF00FF; /* mask */
-
- r5 = p_ref[4]; /* i */
- r6 = (r5 << 16);
- r5 = r6 | (r2 >> 16);/* 0,i,0,g */
- r5 += r1; /* d+i, b+g */ /* r5 not free */
- r1 >>= 16;
- r1 |= (r3 << 16); /* 0,f,0,d */ /* r1 has changed */
- r1 += r2; /* f+g, d+e */
- r5 += 20 * r1; /* d+20f+20g+i, b+20d+20e+g */
- r0 >>= 16;
- r0 |= (r2 << 16); /* 0,e,0,c */ /* r0 has changed */
- r0 += r3; /* e+h, c+f */
- r5 += 0x100010; /* 16,16 */
- r5 -= r0 * 5; /* d-5e+20f+20g-5h+i, b-5c+20d+20e-5f+g */
- r5 >>= 5;
- r13 |= r5; /* check clipping */
- r5 &= 0xFF00FF; /* mask */
-
- r4 |= (r5 << 8); /* pack them together */
- *p_tmp++ = r4;
- r1 = r3;
- r0 = r2;
- }
- p_tmp += ((24 - blkwidth) >> 2); /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
-
- if (r13&0xFF000700) /* need clipping */
- {
- /* move back to the beginning of the line */
- p_ref -= (ref_offset + blkwidth); /* input */
- p_tmp -= 6; /* intermediate output */
- tmp = (uint32)(p_ref + blkwidth);
- while ((uint32)p_ref < tmp)
- {
- r0 = *p_ref++;
- r1 = *p_ref++;
- r2 = *p_ref++;
- r3 = *p_ref++;
- r4 = *p_ref++;
- /* first pixel */
- r5 = *p_ref++;
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres = result;
- /* second pixel */
- r0 = *p_ref++;
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 8);
- /* third pixel */
- r1 = *p_ref++;
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 16);
- /* fourth pixel */
- r2 = *p_ref++;
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- pkres |= (result << 24);
-
- *p_tmp++ = pkres; /* write 4 pixels */
- p_ref -= 5;
- }
- p_tmp += ((24 - blkwidth) >> 2); /* move to the next line */
- p_ref += ref_offset; /* ref_offset = inpitch-blkwidth; */
- }
- }
-
- /* perform vertical interpolation */
- /* not word-aligned */
- if (((uint32)in2)&0x3)
- {
- CreateAlign(in2, inpitch, -2, &tmp_in[0][0], blkwidth, blkheight + 5);
- in2 = &tmp_in[2][0];
- inpitch = 24;
- }
-
- p_cur = out;
- curr_offset = 1 - outpitch * (blkheight - 1); /* offset vertically up and one pixel right */
- pkres = blkheight * inpitch; /* reuse it for limit */
-
- curr_offset += 3;
-
- for (j = 0; j < blkwidth; j += 4, in2 += 4)
- {
- r13 = 0;
- p_ref = in2;
- p_tmp8 = &(tmp_res[0][j]); /* intermediate result */
- p_tmp8 -= 24; /* compensate for the first offset */
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + pkres); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
- /* Reading 1 byte at a time is too slow here (too many read and pack ops), so CreateAlign is needed. */
- /*p_ref8 = p_ref-(inpitch<<1); r0 = p_ref8[0]; r1 = p_ref8[2];
- r0 |= (r1<<16); r6 = p_ref8[1]; r1 = p_ref8[3];
- r6 |= (r1<<16); p_ref+=inpitch; */
- r0 = *((uint32*)(p_ref - (inpitch << 1))); /* load 4 bytes */
- p_ref += inpitch;
- r6 = (r0 >> 8) & 0xFF00FF; /* second and fourth byte */
- r0 &= 0xFF00FF;
-
- /*p_ref8 = p_ref+(inpitch<<1);
- r1 = p_ref8[0]; r7 = p_ref8[2]; r1 |= (r7<<16);
- r7 = p_ref8[1]; r2 = p_ref8[3]; r7 |= (r2<<16);*/
- r1 = *((uint32*)(p_ref + (inpitch << 1))); /* r1, r7, ref[3] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
-
- r0 += r1;
- r6 += r7;
-
- /*r2 = p_ref[0]; r8 = p_ref[2]; r2 |= (r8<<16);
- r8 = p_ref[1]; r1 = p_ref[3]; r8 |= (r1<<16);*/
- r2 = *((uint32*)p_ref); /* r2, r8, ref[1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- /*p_ref8 = p_ref-inpitch; r1 = p_ref8[0]; r7 = p_ref8[2];
- r1 |= (r7<<16); r1 += r2; r7 = p_ref8[1];
- r2 = p_ref8[3]; r7 |= (r2<<16);*/
- r1 = *((uint32*)(p_ref - inpitch)); /* r1, r7, ref[0] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 += 20 * r1;
- r6 += 20 * r7;
- r0 += 0x100010;
- r6 += 0x100010;
-
- /*p_ref8 = p_ref-(inpitch<<1); r2 = p_ref8[0]; r8 = p_ref8[2];
- r2 |= (r8<<16); r8 = p_ref8[1]; r1 = p_ref8[3]; r8 |= (r1<<16);*/
- r2 = *((uint32*)(p_ref - (inpitch << 1))); /* r2, r8, ref[-1] */
- r8 = (r2 >> 8) & 0xFF00FF;
- r2 &= 0xFF00FF;
-
- /*p_ref8 = p_ref+inpitch; r1 = p_ref8[0]; r7 = p_ref8[2];
- r1 |= (r7<<16); r1 += r2; r7 = p_ref8[1];
- r2 = p_ref8[3]; r7 |= (r2<<16);*/
- r1 = *((uint32*)(p_ref + inpitch)); /* r1, r7, ref[2] */
- r7 = (r1 >> 8) & 0xFF00FF;
- r1 &= 0xFF00FF;
- r1 += r2;
-
- r7 += r8;
-
- r0 -= 5 * r1;
- r6 -= 5 * r7;
-
- r0 >>= 5;
- r6 >>= 5;
- /* clip */
- r13 |= r6;
- r13 |= r0;
- //CLIPPACK(r6,result)
- /* add with horizontal results */
- r10 = *((uint32*)(p_tmp8 += 24));
- r9 = (r10 >> 8) & 0xFF00FF;
- r10 &= 0xFF00FF;
-
- r0 += r10;
- r0 += 0x10001;
- r0 = (r0 >> 1) & 0xFF00FF; /* mask to 8 bytes */
-
- r6 += r9;
- r6 += 0x10001;
- r6 = (r6 >> 1) & 0xFF00FF; /* mask to 8 bytes */
-
- r0 |= (r6 << 8); /* pack it back */
- *((uint32*)(p_cur += outpitch)) = r0;
- }
- p_cur += curr_offset; /* offset to the next pixel */
- if (r13 & 0xFF000700) /* this column needs clipping */
- {
- p_cur -= 4;
- for (i = 0; i < 4; i++)
- {
- p_ref = in2 + i;
- p_tmp8 = &(tmp_res[0][j+i]); /* intermediate result */
- p_tmp8 -= 24; /* compensate for the first offset */
- p_cur -= outpitch; /* compensate for the first offset */
- tmp = (uint32)(p_ref + pkres); /* limit */
- while ((uint32)p_ref < tmp) /* the loop un-rolled */
- {
- r0 = *(p_ref - (inpitch << 1));
- r1 = *(p_ref - inpitch);
- r2 = *p_ref;
- r3 = *(p_ref += inpitch); /* modify pointer before loading */
- r4 = *(p_ref += inpitch);
- /* first pixel */
- r5 = *(p_ref += inpitch);
- result = (r0 + r5);
- r0 = (r1 + r4);
- result -= (r0 * 5);//result -= r0; result -= (r0<<2);
- r0 = (r2 + r3);
- result += (r0 * 20);//result += (r0<<4); result += (r0<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* modify pointer before loading */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* second pixel */
- r0 = *(p_ref += inpitch);
- result = (r1 + r0);
- r1 = (r2 + r5);
- result -= (r1 * 5);//result -= r1; result -= (r1<<2);
- r1 = (r3 + r4);
- result += (r1 * 20);//result += (r1<<4); result += (r1<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* intermediate result */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* third pixel */
- r1 = *(p_ref += inpitch);
- result = (r2 + r1);
- r2 = (r3 + r0);
- result -= (r2 * 5);//result -= r2; result -= (r2<<2);
- r2 = (r4 + r5);
- result += (r2 * 20);//result += (r2<<4); result += (r2<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* intermediate result */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- /* fourth pixel */
- r2 = *(p_ref += inpitch);
- result = (r3 + r2);
- r3 = (r4 + r1);
- result -= (r3 * 5);//result -= r3; result -= (r3<<2);
- r3 = (r5 + r0);
- result += (r3 * 20);//result += (r3<<4); result += (r3<<2);
- result = (result + 16) >> 5;
- CLIP_RESULT(result)
- tmp_result = *(p_tmp8 += 24); /* intermediate result */
- result = (result + tmp_result + 1); /* no clip */
- result = (result >> 1);
- *(p_cur += outpitch) = result;
- p_ref -= (inpitch << 1); /* move back to the filter center for the next iteration */
- }
- p_cur += (curr_offset - 3);
- }
- }
- }
-
- return ;
-}
-
-/* position G */
-void FullPelMC(uint8 *in, int inpitch, uint8 *out, int outpitch,
- int blkwidth, int blkheight)
-{
- int i, j;
- int offset_in = inpitch - blkwidth;
- int offset_out = outpitch - blkwidth;
- uint32 temp;
- uint8 byte;
-
- if (((uint32)in)&3)
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 4)
- {
- temp = *in++;
- byte = *in++;
- temp |= (byte << 8);
- byte = *in++;
- temp |= (byte << 16);
- byte = *in++;
- temp |= (byte << 24);
-
- *((uint32*)out) = temp; /* write 4 bytes */
- out += 4;
- }
- out += offset_out;
- in += offset_in;
- }
- }
- else
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 4)
- {
- temp = *((uint32*)in);
- *((uint32*)out) = temp;
- in += 4;
- out += 4;
- }
- out += offset_out;
- in += offset_in;
- }
- }
- return ;
-}
-
-void ChromaMotionComp(uint8 *ref, int picwidth, int picheight,
- int x_pos, int y_pos,
- uint8 *pred, int pred_pitch,
- int blkwidth, int blkheight)
-{
- int dx, dy;
- int offset_dx, offset_dy;
- int index;
- uint8 temp[24][24];
-
- dx = x_pos & 7;
- dy = y_pos & 7;
- offset_dx = (dx + 7) >> 3;
- offset_dy = (dy + 7) >> 3;
- x_pos = x_pos >> 3; /* round it to full-pel resolution */
- y_pos = y_pos >> 3;
-
- if ((x_pos >= 0 && x_pos + blkwidth + offset_dx <= picwidth) && (y_pos >= 0 && y_pos + blkheight + offset_dy <= picheight))
- {
- ref += y_pos * picwidth + x_pos;
- }
- else
- {
- CreatePad(ref, picwidth, picheight, x_pos, y_pos, &temp[0][0], blkwidth + offset_dx, blkheight + offset_dy);
- ref = &temp[0][0];
- picwidth = 24;
- }
-
- index = offset_dx + (offset_dy << 1) + ((blkwidth << 1) & 0x7);
-
- (*(ChromaMC_SIMD[index]))(ref, picwidth , dx, dy, pred, pred_pitch, blkwidth, blkheight);
- return ;
-}
-
-
-/* SIMD-style routines; the loops are unrolled in the vertical direction to reduce the number of loop iterations */
-void ChromaDiagonalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- int32 r0, r1, r2, r3, result0, result1;
- uint8 temp[288];
- uint8 *ref, *out;
- int i, j;
- int dx_8 = 8 - dx;
- int dy_8 = 8 - dy;
-
- /* horizontal first */
- out = temp;
- for (i = 0; i < blkheight + 1; i++)
- {
- ref = pRef;
- r0 = ref[0];
- for (j = 0; j < blkwidth; j += 4)
- {
- r0 |= (ref[2] << 16);
- result0 = dx_8 * r0;
-
- r1 = ref[1] | (ref[3] << 16);
- result0 += dx * r1;
- *(int32 *)out = result0;
-
- result0 = dx_8 * r1;
-
- r2 = ref[4];
- r0 = r0 >> 16;
- r1 = r0 | (r2 << 16);
- result0 += dx * r1;
- *(int32 *)(out + 16) = result0;
-
- ref += 4;
- out += 4;
- r0 = r2;
- }
- pRef += srcPitch;
- out += (32 - blkwidth);
- }
-
-// pRef -= srcPitch*(blkheight+1);
- ref = temp;
-
- for (j = 0; j < blkwidth; j += 4)
- {
- r0 = *(int32 *)ref;
- r1 = *(int32 *)(ref + 16);
- ref += 32;
- out = pOut;
- for (i = 0; i < (blkheight >> 1); i++)
- {
- result0 = dy_8 * r0 + 0x00200020;
- r2 = *(int32 *)ref;
- result0 += dy * r2;
- result0 >>= 6;
- result0 &= 0x00FF00FF;
- r0 = r2;
-
- result1 = dy_8 * r1 + 0x00200020;
- r3 = *(int32 *)(ref + 16);
- result1 += dy * r3;
- result1 >>= 6;
- result1 &= 0x00FF00FF;
- r1 = r3;
- *(int32 *)out = result0 | (result1 << 8);
- out += predPitch;
- ref += 32;
-
- result0 = dy_8 * r0 + 0x00200020;
- r2 = *(int32 *)ref;
- result0 += dy * r2;
- result0 >>= 6;
- result0 &= 0x00FF00FF;
- r0 = r2;
-
- result1 = dy_8 * r1 + 0x00200020;
- r3 = *(int32 *)(ref + 16);
- result1 += dy * r3;
- result1 >>= 6;
- result1 &= 0x00FF00FF;
- r1 = r3;
- *(int32 *)out = result0 | (result1 << 8);
- out += predPitch;
- ref += 32;
- }
- pOut += 4;
- ref = temp + 4; /* since it can only iterate twice max */
- }
- return;
-}
-
-void ChromaHorizontalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dy);
- int32 r0, r1, r2, result0, result1;
- uint8 *ref, *out;
- int i, j;
- int dx_8 = 8 - dx;
-
- /* horizontal first */
- for (i = 0; i < blkheight; i++)
- {
- ref = pRef;
- out = pOut;
-
- r0 = ref[0];
- for (j = 0; j < blkwidth; j += 4)
- {
- r0 |= (ref[2] << 16);
- result0 = dx_8 * r0 + 0x00040004;
-
- r1 = ref[1] | (ref[3] << 16);
- result0 += dx * r1;
- result0 >>= 3;
- result0 &= 0x00FF00FF;
-
- result1 = dx_8 * r1 + 0x00040004;
-
- r2 = ref[4];
- r0 = r0 >> 16;
- r1 = r0 | (r2 << 16);
- result1 += dx * r1;
- result1 >>= 3;
- result1 &= 0x00FF00FF;
-
- *(int32 *)out = result0 | (result1 << 8);
-
- ref += 4;
- out += 4;
- r0 = r2;
- }
-
- pRef += srcPitch;
- pOut += predPitch;
- }
- return;
-}
-
-void ChromaVerticalMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dx);
- int32 r0, r1, r2, r3, result0, result1;
- int i, j;
- uint8 *ref, *out;
- int dy_8 = 8 - dy;
- /* vertical first */
- for (i = 0; i < blkwidth; i += 4)
- {
- ref = pRef;
- out = pOut;
-
- r0 = ref[0] | (ref[2] << 16);
- r1 = ref[1] | (ref[3] << 16);
- ref += srcPitch;
- for (j = 0; j < blkheight; j++)
- {
- result0 = dy_8 * r0 + 0x00040004;
- r2 = ref[0] | (ref[2] << 16);
- result0 += dy * r2;
- result0 >>= 3;
- result0 &= 0x00FF00FF;
- r0 = r2;
-
- result1 = dy_8 * r1 + 0x00040004;
- r3 = ref[1] | (ref[3] << 16);
- result1 += dy * r3;
- result1 >>= 3;
- result1 &= 0x00FF00FF;
- r1 = r3;
- *(int32 *)out = result0 | (result1 << 8);
- ref += srcPitch;
- out += predPitch;
- }
- pOut += 4;
- pRef += 4;
- }
- return;
-}
-
-void ChromaDiagonalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(blkwidth);
- int32 r0, r1, temp0, temp1, result;
- int32 temp[9];
- int32 *out;
- int i, r_temp;
- int dy_8 = 8 - dy;
-
- /* horizontal first */
- out = temp;
- for (i = 0; i < blkheight + 1; i++)
- {
- r_temp = pRef[1];
- temp0 = (pRef[0] << 3) + dx * (r_temp - pRef[0]);
- temp1 = (r_temp << 3) + dx * (pRef[2] - r_temp);
- r0 = temp0 | (temp1 << 16);
- *out++ = r0;
- pRef += srcPitch;
- }
-
- pRef -= srcPitch * (blkheight + 1);
-
- out = temp;
-
- r0 = *out++;
-
- for (i = 0; i < blkheight; i++)
- {
- result = dy_8 * r0 + 0x00200020;
- r1 = *out++;
- result += dy * r1;
- result >>= 6;
- result &= 0x00FF00FF;
- *(int16 *)pOut = (result >> 8) | (result & 0xFF);
- r0 = r1;
- pOut += predPitch;
- }
- return;
-}
-
-void ChromaHorizontalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dy);
- OSCL_UNUSED_ARG(blkwidth);
- int i, temp, temp0, temp1;
-
- /* horizontal first */
- for (i = 0; i < blkheight; i++)
- {
- temp = pRef[1];
- temp0 = ((pRef[0] << 3) + dx * (temp - pRef[0]) + 4) >> 3;
- temp1 = ((temp << 3) + dx * (pRef[2] - temp) + 4) >> 3;
-
- *(int16 *)pOut = temp0 | (temp1 << 8);
- pRef += srcPitch;
- pOut += predPitch;
-
- }
- return;
-}
-void ChromaVerticalMC2_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dx);
- OSCL_UNUSED_ARG(blkwidth);
- int32 r0, r1, result;
- int i;
- int dy_8 = 8 - dy;
- r0 = pRef[0] | (pRef[1] << 16);
- pRef += srcPitch;
- for (i = 0; i < blkheight; i++)
- {
- result = dy_8 * r0 + 0x00040004;
- r1 = pRef[0] | (pRef[1] << 16);
- result += dy * r1;
- result >>= 3;
- result &= 0x00FF00FF;
- *(int16 *)pOut = (result >> 8) | (result & 0xFF);
- r0 = r1;
- pRef += srcPitch;
- pOut += predPitch;
- }
- return;
-}
-
-void ChromaFullMC_SIMD(uint8 *pRef, int srcPitch, int dx, int dy,
- uint8 *pOut, int predPitch, int blkwidth, int blkheight)
-{
- OSCL_UNUSED_ARG(dx);
- OSCL_UNUSED_ARG(dy);
- int i, j;
- int offset_in = srcPitch - blkwidth;
- int offset_out = predPitch - blkwidth;
- uint16 temp;
- uint8 byte;
-
- if (((uint32)pRef)&1)
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 2)
- {
- temp = *pRef++;
- byte = *pRef++;
- temp |= (byte << 8);
- *((uint16*)pOut) = temp; /* write 2 bytes */
- pOut += 2;
- }
- pOut += offset_out;
- pRef += offset_in;
- }
- }
- else
- {
- for (j = blkheight; j > 0; j--)
- {
- for (i = blkwidth; i > 0; i -= 2)
- {
- temp = *((uint16*)pRef);
- *((uint16*)pOut) = temp;
- pRef += 2;
- pOut += 2;
- }
- pOut += offset_out;
- pRef += offset_in;
- }
- }
- return ;
-}
diff --git a/media/libstagefright/codecs/avc/dec/src/pred_intra.cpp b/media/libstagefright/codecs/avc/dec/src/pred_intra.cpp
deleted file mode 100644
index 0b613a4..0000000
--- a/media/libstagefright/codecs/avc/dec/src/pred_intra.cpp
+++ /dev/null
@@ -1,1786 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-
-#define CLIP_COMP *comp++ = (uint8)(((uint)temp>0xFF)? 0xFF&(~(temp>>31)): temp)
-#define CLIP_RESULT(x) if((uint)x > 0xFF){ \
- x = 0xFF & (~(x>>31));}
-
-
-/* We should combine the Intra4x4 functions with residual decoding and compensation */
-AVCStatus IntraMBPrediction(AVCCommonObj *video)
-{
- int component, SubBlock_indx, temp;
- AVCStatus status;
- AVCMacroblock *currMB = video->currMB;
- AVCPictureData *currPic = video->currPic;
- uint8 *curL, *curCb, *curCr;
- uint8 *comp;
- int block_x, block_y, offset;
- int16 *dataBlock = video->block;
- uint8 *predCb, *predCr;
-#ifdef USE_PRED_BLOCK
- uint8 *pred;
-#endif
- int pitch = currPic->pitch;
- uint32 cbp4x4 = video->cbp4x4;
-
- offset = (video->mb_y << 4) * pitch + (video->mb_x << 4);
- curL = currPic->Sl + offset;
-
-#ifdef USE_PRED_BLOCK
- video->pred_block = video->pred + 84; /* point to separate prediction memory */
- pred = video->pred_block;
- video->pred_pitch = 20;
-#else
- video->pred_block = curL; /* point directly to the frame buffer */
- video->pred_pitch = pitch;
-#endif
-
- if (currMB->mbMode == AVC_I4)
- {
- /* luminance first */
- block_x = block_y = 0;
- for (component = 0; component < 4; component++)
- {
- block_x = ((component & 1) << 1);
- block_y = ((component >> 1) << 1);
- comp = curL;// + (block_x<<2) + (block_y<<2)*currPic->pitch;
-
- for (SubBlock_indx = 0; SubBlock_indx < 4; SubBlock_indx++)
- {
- status = Intra_4x4(video, block_x, block_y, comp);
- if (status != AVC_SUCCESS)
- {
- return status;
- }
- /* transform following the 4x4 prediction; this cannot be
- done with SIMD across other blocks. */
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&(1 << ((block_y << 2) + block_x)))
- {
- itrans(dataBlock, pred, pred, 20);
- }
-#else
- if (cbp4x4&(1 << ((block_y << 2) + block_x)))
- {
- itrans(dataBlock, comp, comp, pitch);
- }
-#endif
- temp = SubBlock_indx & 1;
- if (temp)
- {
- block_y++;
- block_x--;
- dataBlock += 60;
-#ifdef USE_PRED_BLOCK
- pred += 76;
-#else
- comp += ((pitch << 2) - 4);
-#endif
- }
- else
- {
- block_x++;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- pred += 4;
-#else
- comp += 4;
-#endif
- }
- }
- if (component&1)
- {
-#ifdef USE_PRED_BLOCK
- pred -= 8;
-#else
- curL += (pitch << 3) - 8;
-#endif
- dataBlock -= 8;
- }
- else
- {
-#ifdef USE_PRED_BLOCK
- pred -= 152;
-#else
- curL += 8;
-#endif
- dataBlock -= 120;
- }
- }
- cbp4x4 >>= 16;
- }
- else /* AVC_I16 */
- {
-#ifdef MB_BASED_DEBLOCK
- video->pintra_pred_top = video->intra_pred_top + (video->mb_x << 4);
- video->pintra_pred_left = video->intra_pred_left + 1;
- video->intra_pred_topleft = video->intra_pred_left[0];
- pitch = 1;
-#else
- video->pintra_pred_top = curL - pitch;
- video->pintra_pred_left = curL - 1;
- if (video->mb_y)
- {
- video->intra_pred_topleft = *(curL - pitch - 1);
- }
-#endif
- switch (currMB->i16Mode)
- {
- case AVC_I16_Vertical: /* Intra_16x16_Vertical */
- /* check availability of top */
- if (video->intraAvailB)
- {
- Intra_16x16_Vertical(video);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_I16_Horizontal: /* Intra_16x16_Horizontal */
- /* check availability of left */
- if (video->intraAvailA)
- {
- Intra_16x16_Horizontal(video, pitch);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_I16_DC: /* Intra_16x16_DC */
- Intra_16x16_DC(video, pitch);
- break;
- case AVC_I16_Plane: /* Intra_16x16_Plane */
- if (video->intraAvailA && video->intraAvailB && video->intraAvailD)
- {
- Intra_16x16_Plane(video, pitch);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- default:
- break;
- }
-
- pitch = currPic->pitch;
-
- /* transform */
- /* can go in raster scan order now */
- /* can be done in SIMD, */
- for (block_y = 4; block_y > 0; block_y--)
- {
- for (block_x = 4; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- itrans(dataBlock, pred, pred, 20);
- }
-#else
- if (cbp4x4&1)
- {
- itrans(dataBlock, curL, curL, pitch);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- pred += 4;
-#else
- curL += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- pred += 64;
-#else
- curL += ((pitch << 2) - 16);
-#endif
- }
- }
-
- offset = (offset >> 2) + (video->mb_x << 2); //((video->mb_y << 3)* pitch + (video->mb_x << 3));
- curCb = currPic->Scb + offset;
- curCr = currPic->Scr + offset;
-
-#ifdef MB_BASED_DEBLOCK
- video->pintra_pred_top_cb = video->intra_pred_top_cb + (video->mb_x << 3);
- video->pintra_pred_left_cb = video->intra_pred_left_cb + 1;
- video->intra_pred_topleft_cb = video->intra_pred_left_cb[0];
- video->pintra_pred_top_cr = video->intra_pred_top_cr + (video->mb_x << 3);
- video->pintra_pred_left_cr = video->intra_pred_left_cr + 1;
- video->intra_pred_topleft_cr = video->intra_pred_left_cr[0];
- pitch = 1;
-#else
- pitch >>= 1;
- video->pintra_pred_top_cb = curCb - pitch;
- video->pintra_pred_left_cb = curCb - 1;
- video->pintra_pred_top_cr = curCr - pitch;
- video->pintra_pred_left_cr = curCr - 1;
-
- if (video->mb_y)
- {
- video->intra_pred_topleft_cb = *(curCb - pitch - 1);
- video->intra_pred_topleft_cr = *(curCr - pitch - 1);
- }
-#endif
-
-#ifdef USE_PRED_BLOCK
- predCb = video->pred + 452;
- predCr = predCb + 144;
- video->pred_pitch = 12;
-#else
- predCb = curCb;
- predCr = curCr;
- video->pred_pitch = currPic->pitch >> 1;
-#endif
- /* chrominance */
- switch (currMB->intra_chroma_pred_mode)
- {
- case AVC_IC_DC: /* Intra_Chroma_DC */
- Intra_Chroma_DC(video, pitch, predCb, predCr);
- break;
- case AVC_IC_Horizontal: /* Intra_Chroma_Horizontal */
- if (video->intraAvailA)
- {
- /* check availability of left */
- Intra_Chroma_Horizontal(video, pitch, predCb, predCr);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_IC_Vertical: /* Intra_Chroma_Vertical */
- if (video->intraAvailB)
- {
- /* check availability of top */
- Intra_Chroma_Vertical(video, predCb, predCr);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- case AVC_IC_Plane: /* Intra_Chroma_Plane */
- if (video->intraAvailA && video->intraAvailB && video->intraAvailD)
- {
- /* check availability of top and left */
- Intra_Chroma_Plane(video, pitch, predCb, predCr);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
- default:
- break;
- }
-
- /* transform, done in raster scan manner */
- pitch = currPic->pitch >> 1;
-
- for (block_y = 2; block_y > 0; block_y--)
- {
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCb, predCb, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCb, curCb, pitch);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCb += 4;
-#else
- curCb += 4;
-#endif
- }
- for (block_x = 2; block_x > 0; block_x--)
- {
-#ifdef USE_PRED_BLOCK
- if (cbp4x4&1)
- {
- ictrans(dataBlock, predCr, predCr, 12);
- }
-#else
- if (cbp4x4&1)
- {
- ictrans(dataBlock, curCr, curCr, pitch);
- }
-#endif
- cbp4x4 >>= 1;
- dataBlock += 4;
-#ifdef USE_PRED_BLOCK
- predCr += 4;
-#else
- curCr += 4;
-#endif
- }
- dataBlock += 48;
-#ifdef USE_PRED_BLOCK
- predCb += 40;
- predCr += 40;
-#else
- curCb += ((pitch << 2) - 8);
- curCr += ((pitch << 2) - 8);
-#endif
- }
-
-#ifdef MB_BASED_DEBLOCK
- SaveNeighborForIntraPred(video, offset);
-#endif
- return AVC_SUCCESS;
-}
-
-#ifdef MB_BASED_DEBLOCK
-void SaveNeighborForIntraPred(AVCCommonObj *video, int offset)
-{
- AVCPictureData *currPic = video->currPic;
- int pitch;
- uint8 *pred, *predCb, *predCr;
- uint8 *tmp_ptr, tmp_byte;
- uint32 tmp_word;
- int mb_x = video->mb_x;
-
- /* save the value for intra prediction */
-#ifdef USE_PRED_BLOCK
- pitch = 20;
- pred = video->pred + 384; /* bottom line for Y */
- predCb = pred + 152; /* bottom line for Cb */
- predCr = predCb + 144; /* bottom line for Cr */
-#else
- pitch = currPic->pitch;
- tmp_word = offset + (pitch << 2) - (pitch >> 1);
- predCb = currPic->Scb + tmp_word;/* bottom line for Cb */
- predCr = currPic->Scr + tmp_word;/* bottom line for Cr */
-
- offset = (offset << 2) - (mb_x << 4);
- pred = currPic->Sl + offset + (pitch << 4) - pitch;/* bottom line for Y */
-
-#endif
-
- video->intra_pred_topleft = video->intra_pred_top[(mb_x<<4)+15];
- video->intra_pred_topleft_cb = video->intra_pred_top_cb[(mb_x<<3)+7];
- video->intra_pred_topleft_cr = video->intra_pred_top_cr[(mb_x<<3)+7];
-
- /* then copy to video->intra_pred_top, intra_pred_top_cb, intra_pred_top_cr */
- /*memcpy(video->intra_pred_top + (mb_x<<4), pred, 16);
- memcpy(video->intra_pred_top_cb + (mb_x<<3), predCb, 8);
- memcpy(video->intra_pred_top_cr + (mb_x<<3), predCr, 8);*/
- tmp_ptr = video->intra_pred_top + (mb_x << 4);
- *((uint32*)tmp_ptr) = *((uint32*)pred);
- *((uint32*)(tmp_ptr + 4)) = *((uint32*)(pred + 4));
- *((uint32*)(tmp_ptr + 8)) = *((uint32*)(pred + 8));
- *((uint32*)(tmp_ptr + 12)) = *((uint32*)(pred + 12));
- tmp_ptr = video->intra_pred_top_cb + (mb_x << 3);
- *((uint32*)tmp_ptr) = *((uint32*)predCb);
- *((uint32*)(tmp_ptr + 4)) = *((uint32*)(predCb + 4));
- tmp_ptr = video->intra_pred_top_cr + (mb_x << 3);
- *((uint32*)tmp_ptr) = *((uint32*)predCr);
- *((uint32*)(tmp_ptr + 4)) = *((uint32*)(predCr + 4));
-
-
- /* now save last column */
-#ifdef USE_PRED_BLOCK
- pred = video->pred + 99; /* last column*/
-#else
- pred -= ((pitch << 4) - pitch - 15); /* last column */
-#endif
- tmp_ptr = video->intra_pred_left;
- tmp_word = video->intra_pred_topleft;
- tmp_byte = *(pred);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)tmp_ptr) = tmp_word;
- tmp_word = *(pred += pitch);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- tmp_word = *(pred += pitch);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- tmp_word = *(pred += pitch);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(pred += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- *(tmp_ptr += 4) = *(pred += pitch);
-
- /* now for Cb */
-#ifdef USE_PRED_BLOCK
- predCb = video->pred + 459;
- pitch = 12;
-#else
- pitch >>= 1;
- predCb -= (7 * pitch - 7);
-#endif
- tmp_ptr = video->intra_pred_left_cb;
- tmp_word = video->intra_pred_topleft_cb;
- tmp_byte = *(predCb);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)tmp_ptr) = tmp_word;
- tmp_word = *(predCb += pitch);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCb += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- *(tmp_ptr += 4) = *(predCb += pitch);
-
- /* now for Cr */
-#ifdef USE_PRED_BLOCK
- predCr = video->pred + 603;
-#else
- predCr -= (7 * pitch - 7);
-#endif
- tmp_ptr = video->intra_pred_left_cr;
- tmp_word = video->intra_pred_topleft_cr;
- tmp_byte = *(predCr);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)tmp_ptr) = tmp_word;
- tmp_word = *(predCr += pitch);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 8);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 16);
- tmp_byte = *(predCr += pitch);
- tmp_word |= (tmp_byte << 24);
- *((uint32*)(tmp_ptr += 4)) = tmp_word;
- *(tmp_ptr += 4) = *(predCr += pitch);
-
- return ;
-}
-#endif /* MB_BASED_DEBLOCK */
-
-AVCStatus Intra_4x4(AVCCommonObj *video, int block_x, int block_y, uint8 *comp)
-{
- AVCMacroblock *currMB = video->currMB;
- int block_offset;
- AVCNeighborAvailability availability;
- int pitch = video->currPic->pitch;
-
-#ifdef USE_PRED_BLOCK
- block_offset = (block_y * 80) + (block_x << 2);
-#else
- block_offset = (block_y << 2) * pitch + (block_x << 2);
-#endif
-
-#ifdef MB_BASED_DEBLOCK
- /* boundary blocks use video->intra_pred_top, intra_pred_left, intra_pred_topleft */
- if (!block_x)
- {
- video->pintra_pred_left = video->intra_pred_left + 1 + (block_y << 2);
- pitch = 1;
- }
- else
- {
- video->pintra_pred_left = video->pred_block + block_offset - 1;
- pitch = video->pred_pitch;
- }
-
- if (!block_y)
- {
- video->pintra_pred_top = video->intra_pred_top + (block_x << 2) + (video->mb_x << 4);
- }
- else
- {
- video->pintra_pred_top = video->pred_block + block_offset - video->pred_pitch;
- }
-
- if (!block_x)
- {
- video->intra_pred_topleft = video->intra_pred_left[block_y<<2];
- }
- else if (!block_y)
- {
- video->intra_pred_topleft = video->intra_pred_top[(video->mb_x<<4)+(block_x<<2)-1];
- }
- else
- {
- video->intra_pred_topleft = video->pred_block[block_offset - video->pred_pitch - 1];
- }
-
-#else
- /* normal case */
- video->pintra_pred_top = comp - pitch;
- video->pintra_pred_left = comp - 1;
- if (video->mb_y || block_y)
- {
- video->intra_pred_topleft = *(comp - pitch - 1);
- }
-#endif
-
- switch (currMB->i4Mode[(block_y << 2) + block_x])
- {
- case AVC_I4_Vertical: /* Intra_4x4_Vertical */
- if (block_y > 0 || video->intraAvailB)/* to prevent out-of-bound access*/
- {
- Intra_4x4_Vertical(video, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Horizontal: /* Intra_4x4_Horizontal */
- if (block_x || video->intraAvailA) /* to prevent out-of-bound access */
- {
- Intra_4x4_Horizontal(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_DC: /* Intra_4x4_DC */
- availability.left = TRUE;
- availability.top = TRUE;
- if (!block_y)
- { /* check availability up */
- availability.top = video->intraAvailB ;
- }
- if (!block_x)
- { /* check availability left */
- availability.left = video->intraAvailA ;
- }
- Intra_4x4_DC(video, pitch, block_offset, &availability);
- break;
-
- case AVC_I4_Diagonal_Down_Left: /* Intra_4x4_Diagonal_Down_Left */
- /* lookup table will be more appropriate for this case */
- if (block_y == 0 && !video->intraAvailB)
- {
- return AVC_FAIL;
- }
-
- availability.top_right = BlkTopRight[(block_y<<2) + block_x];
-
- if (availability.top_right == 2)
- {
- availability.top_right = video->intraAvailB;
- }
- else if (availability.top_right == 3)
- {
- availability.top_right = video->intraAvailC;
- }
-
- Intra_4x4_Down_Left(video, block_offset, &availability);
- break;
-
- case AVC_I4_Diagonal_Down_Right: /* Intra_4x4_Diagonal_Down_Right */
- if ((block_y && block_x) /* to prevent out-of-bound access */
- || (block_y && video->intraAvailA)
- || (block_x && video->intraAvailB)
- || (video->intraAvailA && video->intraAvailD && video->intraAvailB))
- {
- Intra_4x4_Diagonal_Down_Right(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Vertical_Right: /* Intra_4x4_Vertical_Right */
- if ((block_y && block_x) /* to prevent out-of-bound access */
- || (block_y && video->intraAvailA)
- || (block_x && video->intraAvailB)
- || (video->intraAvailA && video->intraAvailD && video->intraAvailB))
- {
- Intra_4x4_Diagonal_Vertical_Right(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Horizontal_Down: /* Intra_4x4_Horizontal_Down */
- if ((block_y && block_x) /* to prevent out-of-bound access */
- || (block_y && video->intraAvailA)
- || (block_x && video->intraAvailB)
- || (video->intraAvailA && video->intraAvailD && video->intraAvailB))
- {
- Intra_4x4_Diagonal_Horizontal_Down(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
- case AVC_I4_Vertical_Left: /* Intra_4x4_Vertical_Left */
- /* lookup table may be more appropriate for this case */
- if (block_y == 0 && !video->intraAvailB)
- {
- return AVC_FAIL;
- }
-
- availability.top_right = BlkTopRight[(block_y<<2) + block_x];
-
- if (availability.top_right == 2)
- {
- availability.top_right = video->intraAvailB;
- }
- else if (availability.top_right == 3)
- {
- availability.top_right = video->intraAvailC;
- }
-
- Intra_4x4_Vertical_Left(video, block_offset, &availability);
- break;
-
- case AVC_I4_Horizontal_Up: /* Intra_4x4_Horizontal_Up */
- if (block_x || video->intraAvailA)
- {
- Intra_4x4_Horizontal_Up(video, pitch, block_offset);
- }
- else
- {
- return AVC_FAIL;
- }
- break;
-
-
- default:
-
- break;
- }
-
- return AVC_SUCCESS;
-}
-
-
-/* =============================== BEGIN 4x4
-MODES======================================*/
-void Intra_4x4_Vertical(AVCCommonObj *video, int block_offset)
-{
- uint8 *comp_ref = video->pintra_pred_top;
- uint32 temp;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- /*P = (int) *comp_ref++;
- Q = (int) *comp_ref++;
- R = (int) *comp_ref++;
- S = (int) *comp_ref++;
- temp = S|(R<<8)|(Q<<16)|(P<<24);*/
- temp = *((uint32*)comp_ref);
-
- *((uint32*)pred) = temp; /* write 4 at a time */
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Horizontal(AVCCommonObj *video, int pitch, int block_offset)
-{
- uint8 *comp_ref = video->pintra_pred_left;
- uint32 temp;
- int P;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- P = *comp_ref;
- temp = P | (P << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_DC(AVCCommonObj *video, int pitch, int block_offset,
- AVCNeighborAvailability *availability)
-{
- uint8 *comp_ref = video->pintra_pred_left;
- uint32 temp;
- int DC;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- if (availability->left)
- {
- DC = *comp_ref;
- comp_ref += pitch;
- DC += *comp_ref;
- comp_ref += pitch;
- DC += *comp_ref;
- comp_ref += pitch;
- DC += *comp_ref;
- comp_ref = video->pintra_pred_top;
-
- if (availability->top)
- {
- DC = (comp_ref[0] + comp_ref[1] + comp_ref[2] + comp_ref[3] + DC + 4) >> 3;
- }
- else
- {
- DC = (DC + 2) >> 2;
-
- }
- }
- else if (availability->top)
- {
- comp_ref = video->pintra_pred_top;
- DC = (comp_ref[0] + comp_ref[1] + comp_ref[2] + comp_ref[3] + 2) >> 2;
-
- }
- else
- {
- DC = 128;
- }
-
- temp = DC | (DC << 8);
- temp = temp | (temp << 16);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
- pred += pred_pitch;
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Down_Left(AVCCommonObj *video, int block_offset,
- AVCNeighborAvailability *availability)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint32 temp;
- int r0, r1, r2, r3, r4, r5, r6, r7;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- r0 = *comp_refx++;
- r1 = *comp_refx++;
- r2 = *comp_refx++;
- r3 = *comp_refx++;
- if (availability->top_right)
- {
- r4 = *comp_refx++;
- r5 = *comp_refx++;
- r6 = *comp_refx++;
- r7 = *comp_refx++;
- }
- else
- {
- r4 = r3;
- r5 = r3;
- r6 = r3;
- r7 = r3;
- }
-
- r0 += (r1 << 1);
- r0 += r2;
- r0 += 2;
- r0 >>= 2;
- r1 += (r2 << 1);
- r1 += r3;
- r1 += 2;
- r1 >>= 2;
- r2 += (r3 << 1);
- r2 += r4;
- r2 += 2;
- r2 >>= 2;
- r3 += (r4 << 1);
- r3 += r5;
- r3 += 2;
- r3 >>= 2;
- r4 += (r5 << 1);
- r4 += r6;
- r4 += 2;
- r4 >>= 2;
- r5 += (r6 << 1);
- r5 += r7;
- r5 += 2;
- r5 >>= 2;
- r6 += (3 * r7);
- r6 += 2;
- r6 >>= 2;
-
- temp = r0 | (r1 << 8);
- temp |= (r2 << 16);
- temp |= (r3 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = (temp >> 8) | (r4 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = (temp >> 8) | (r5 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = (temp >> 8) | (r6 << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Diagonal_Down_Right(AVCCommonObj *video, int pitch, int
- block_offset)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int P_x, Q_x, R_x, P_y, Q_y, R_y, D;
- int x0, x1, x2;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- temp = *((uint32*)comp_refx); /* read 4 bytes */
- x0 = temp & 0xFF;
- x1 = (temp >> 8) & 0xFF;
- x2 = (temp >> 16) & 0xFF;
-
- Q_x = (x0 + 2 * x1 + x2 + 2) >> 2;
- R_x = (x1 + 2 * x2 + (temp >> 24) + 2) >> 2;
-
- x2 = video->intra_pred_topleft; /* re-use x2 instead of y0 */
- P_x = (x2 + 2 * x0 + x1 + 2) >> 2;
-
- x1 = *comp_refy;
- comp_refy += pitch; /* re-use x1 instead of y1 */
- D = (x0 + 2 * x2 + x1 + 2) >> 2;
-
- x0 = *comp_refy;
- comp_refy += pitch; /* re-use x0 instead of y2 */
- P_y = (x2 + 2 * x1 + x0 + 2) >> 2;
-
- x2 = *comp_refy;
- comp_refy += pitch; /* re-use x2 instead of y3 */
- Q_y = (x1 + 2 * x0 + x2 + 2) >> 2;
-
- x1 = *comp_refy; /* re-use x1 instead of y4 */
- R_y = (x0 + 2 * x2 + x1 + 2) >> 2;
-
- /* we can pack these */
- temp = D | (P_x << 8); //[D P_x Q_x R_x]
- //[P_y D P_x Q_x]
- temp |= (Q_x << 16); //[Q_y P_y D P_x]
- temp |= (R_x << 24); //[R_y Q_y P_y D ]
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = P_y | (D << 8);
- temp |= (P_x << 16);
- temp |= (Q_x << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = Q_y | (P_y << 8);
- temp |= (D << 16);
- temp |= (P_x << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = R_y | (Q_y << 8);
- temp |= (P_y << 16);
- temp |= (D << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Diagonal_Vertical_Right(AVCCommonObj *video, int pitch, int block_offset)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int P0, Q0, R0, S0, P1, Q1, R1, P2, Q2, D;
- int x0, x1, x2;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- x0 = *comp_refx++;
- x1 = *comp_refx++;
- Q0 = x0 + x1 + 1;
-
- x2 = *comp_refx++;
- R0 = x1 + x2 + 1;
-
- x1 = *comp_refx++; /* reuse x1 instead of x3 */
- S0 = x2 + x1 + 1;
-
- x1 = video->intra_pred_topleft; /* reuse x1 instead of y0 */
- P0 = x1 + x0 + 1;
-
- x2 = *comp_refy;
- comp_refy += pitch; /* reuse x2 instead of y1 */
- D = (x2 + 2 * x1 + x0 + 2) >> 2;
-
- P1 = (P0 + Q0) >> 2;
- Q1 = (Q0 + R0) >> 2;
- R1 = (R0 + S0) >> 2;
-
- P0 >>= 1;
- Q0 >>= 1;
- R0 >>= 1;
- S0 >>= 1;
-
- x0 = *comp_refy;
- comp_refy += pitch; /* reuse x0 instead of y2 */
- P2 = (x1 + 2 * x2 + x0 + 2) >> 2;
- x1 = *comp_refy;
- comp_refy += pitch; /* reuse x1 instead of y3 */
- Q2 = (x2 + 2 * x0 + x1 + 2) >> 2;
-
- temp = P0 | (Q0 << 8); //[P0 Q0 R0 S0]
- //[D P1 Q1 R1]
- temp |= (R0 << 16); //[P2 P0 Q0 R0]
- temp |= (S0 << 24); //[Q2 D P1 Q1]
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = D | (P1 << 8);
- temp |= (Q1 << 16);
- temp |= (R1 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = P2 | (P0 << 8);
- temp |= (Q0 << 16);
- temp |= (R0 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = Q2 | (D << 8);
- temp |= (P1 << 16);
- temp |= (Q1 << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Diagonal_Horizontal_Down(AVCCommonObj *video, int pitch,
- int block_offset)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int P0, Q0, R0, S0, P1, Q1, R1, P2, Q2, D;
- int x0, x1, x2;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- x0 = *comp_refx++;
- x1 = *comp_refx++;
- x2 = *comp_refx++;
- Q2 = (x0 + 2 * x1 + x2 + 2) >> 2;
-
- x2 = video->intra_pred_topleft; /* reuse x2 instead of y0 */
- P2 = (x2 + 2 * x0 + x1 + 2) >> 2;
-
- x1 = *comp_refy;
- comp_refy += pitch; /* reuse x1 instead of y1 */
- D = (x1 + 2 * x2 + x0 + 2) >> 2;
- P0 = x2 + x1 + 1;
-
- x0 = *comp_refy;
- comp_refy += pitch; /* reuse x0 instead of y2 */
- Q0 = x1 + x0 + 1;
-
- x1 = *comp_refy;
- comp_refy += pitch; /* reuse x1 instead of y3 */
- R0 = x0 + x1 + 1;
-
- x2 = *comp_refy; /* reuse x2 instead of y4 */
- S0 = x1 + x2 + 1;
-
- P1 = (P0 + Q0) >> 2;
- Q1 = (Q0 + R0) >> 2;
- R1 = (R0 + S0) >> 2;
-
- P0 >>= 1;
- Q0 >>= 1;
- R0 >>= 1;
- S0 >>= 1;
-
-
- /* we can pack these */
- temp = P0 | (D << 8); //[P0 D P2 Q2]
- //[Q0 P1 P0 D ]
- temp |= (P2 << 16); //[R0 Q1 Q0 P1]
- temp |= (Q2 << 24); //[S0 R1 R0 Q1]
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = Q0 | (P1 << 8);
- temp |= (P0 << 16);
- temp |= (D << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = R0 | (Q1 << 8);
- temp |= (Q0 << 16);
- temp |= (P1 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = S0 | (R1 << 8);
- temp |= (R0 << 16);
- temp |= (Q1 << 24);
- *((uint32*)pred) = temp;
-
- return ;
-}
-
-void Intra_4x4_Vertical_Left(AVCCommonObj *video, int block_offset, AVCNeighborAvailability *availability)
-{
- uint8 *comp_refx = video->pintra_pred_top;
- uint32 temp1, temp2;
- int x0, x1, x2, x3, x4, x5, x6;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- x0 = *comp_refx++;
- x1 = *comp_refx++;
- x2 = *comp_refx++;
- x3 = *comp_refx++;
- if (availability->top_right)
- {
- x4 = *comp_refx++;
- x5 = *comp_refx++;
- x6 = *comp_refx++;
- }
- else
- {
- x4 = x3;
- x5 = x3;
- x6 = x3;
- }
-
- x0 += x1 + 1;
- x1 += x2 + 1;
- x2 += x3 + 1;
- x3 += x4 + 1;
- x4 += x5 + 1;
- x5 += x6 + 1;
-
- temp1 = (x0 >> 1);
- temp1 |= ((x1 >> 1) << 8);
- temp1 |= ((x2 >> 1) << 16);
- temp1 |= ((x3 >> 1) << 24);
-
- *((uint32*)pred) = temp1;
- pred += pred_pitch;
-
- temp2 = ((x0 + x1) >> 2);
- temp2 |= (((x1 + x2) >> 2) << 8);
- temp2 |= (((x2 + x3) >> 2) << 16);
- temp2 |= (((x3 + x4) >> 2) << 24);
-
- *((uint32*)pred) = temp2;
- pred += pred_pitch;
-
- temp1 = (temp1 >> 8) | ((x4 >> 1) << 24); /* rotate out old value */
- *((uint32*)pred) = temp1;
- pred += pred_pitch;
-
- temp2 = (temp2 >> 8) | (((x4 + x5) >> 2) << 24); /* rotate out old value */
- *((uint32*)pred) = temp2;
- pred += pred_pitch;
-
- return ;
-}
-
-void Intra_4x4_Horizontal_Up(AVCCommonObj *video, int pitch, int block_offset)
-{
- uint8 *comp_refy = video->pintra_pred_left;
- uint32 temp;
- int Q0, R0, Q1, D0, D1, P0, P1;
- int y0, y1, y2, y3;
- uint8 *pred = video->pred_block + block_offset;
- int pred_pitch = video->pred_pitch;
-
- y0 = *comp_refy;
- comp_refy += pitch;
- y1 = *comp_refy;
- comp_refy += pitch;
- y2 = *comp_refy;
- comp_refy += pitch;
- y3 = *comp_refy;
-
- Q0 = (y1 + y2 + 1) >> 1;
- Q1 = (y1 + (y2 << 1) + y3 + 2) >> 2;
- P0 = ((y0 + y1 + 1) >> 1);
- P1 = ((y0 + (y1 << 1) + y2 + 2) >> 2);
-
- temp = P0 | (P1 << 8); // [P0 P1 Q0 Q1]
- temp |= (Q0 << 16); // [Q0 Q1 R0 D0]
- temp |= (Q1 << 24); // [R0 D0 D1 D1]
- *((uint32*)pred) = temp; // [D1 D1 D1 D1]
- pred += pred_pitch;
-
- D0 = (y2 + 3 * y3 + 2) >> 2;
- R0 = (y2 + y3 + 1) >> 1;
-
- temp = Q0 | (Q1 << 8);
- temp |= (R0 << 16);
- temp |= (D0 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- D1 = y3;
-
- temp = R0 | (D0 << 8);
- temp |= (D1 << 16);
- temp |= (D1 << 24);
- *((uint32*)pred) = temp;
- pred += pred_pitch;
-
- temp = D1 | (D1 << 8);
- temp |= (temp << 16);
- *((uint32*)pred) = temp;
-
- return ;
-}
-/* =============================== END 4x4 MODES======================================*/
-void Intra_16x16_Vertical(AVCCommonObj *video)
-{
- int i;
- uint32 temp1, temp2, temp3, temp4;
- uint8 *comp_ref = video->pintra_pred_top;
- uint8 *pred = video->pred_block;
- int pred_pitch = video->pred_pitch;
-
- temp1 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- temp2 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- temp3 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- temp4 = *((uint32*)comp_ref);
- comp_ref += 4;
-
- i = 16;
- while (i > 0)
- {
- *((uint32*)pred) = temp1;
- *((uint32*)(pred + 4)) = temp2;
- *((uint32*)(pred + 8)) = temp3;
- *((uint32*)(pred + 12)) = temp4;
- pred += pred_pitch;
- i--;
- }
-
- return ;
-}
-
-void Intra_16x16_Horizontal(AVCCommonObj *video, int pitch)
-{
- int i;
- uint32 temp;
- uint8 *comp_ref = video->pintra_pred_left;
- uint8 *pred = video->pred_block;
- int pred_pitch = video->pred_pitch;
-
- for (i = 0; i < 16; i++)
- {
- temp = *comp_ref;
- temp |= (temp << 8);
- temp |= (temp << 16);
- *((uint32*)pred) = temp;
- *((uint32*)(pred + 4)) = temp;
- *((uint32*)(pred + 8)) = temp;
- *((uint32*)(pred + 12)) = temp;
- pred += pred_pitch;
- comp_ref += pitch;
- }
-}
-
-
-void Intra_16x16_DC(AVCCommonObj *video, int pitch)
-{
- int i;
- uint32 temp, temp2;
- uint8 *comp_ref_x = video->pintra_pred_top;
- uint8 *comp_ref_y = video->pintra_pred_left;
- int sum = 0;
- uint8 *pred = video->pred_block;
- int pred_pitch = video->pred_pitch;
-
- if (video->intraAvailB)
- {
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum = temp + (temp >> 16);
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum += temp + (temp >> 16);
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum += temp + (temp >> 16);
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- sum += temp + (temp >> 16);
- sum &= 0xFFFF;
-
- if (video->intraAvailA)
- {
- for (i = 0; i < 16; i++)
- {
- sum += (*comp_ref_y);
- comp_ref_y += pitch;
- }
- sum = (sum + 16) >> 5;
- }
- else
- {
- sum = (sum + 8) >> 4;
- }
- }
- else if (video->intraAvailA)
- {
- for (i = 0; i < 16; i++)
- {
- sum += *comp_ref_y;
- comp_ref_y += pitch;
- }
- sum = (sum + 8) >> 4;
- }
- else
- {
- sum = 128;
- }
-
- temp = sum | (sum << 8);
- temp |= (temp << 16);
-
- for (i = 0; i < 16; i++)
- {
- *((uint32*)pred) = temp;
- *((uint32*)(pred + 4)) = temp;
- *((uint32*)(pred + 8)) = temp;
- *((uint32*)(pred + 12)) = temp;
- pred += pred_pitch;
- }
-
-}
-
-void Intra_16x16_Plane(AVCCommonObj *video, int pitch)
-{
- int i, a_16, b, c, factor_c;
- uint8 *comp_ref_x = video->pintra_pred_top;
- uint8 *comp_ref_y = video->pintra_pred_left;
- uint8 *comp_ref_x0, *comp_ref_x1, *comp_ref_y0, *comp_ref_y1;
- int H = 0, V = 0 , tmp;
- uint8 *pred = video->pred_block;
- uint32 temp;
- uint8 byte1, byte2, byte3;
- int value;
- int pred_pitch = video->pred_pitch;
-
- comp_ref_x0 = comp_ref_x + 8;
- comp_ref_x1 = comp_ref_x + 6;
- comp_ref_y0 = comp_ref_y + (pitch << 3);
- comp_ref_y1 = comp_ref_y + 6 * pitch;
-
- for (i = 1; i < 8; i++)
- {
- H += i * (*comp_ref_x0++ - *comp_ref_x1--);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
- comp_ref_y0 += pitch;
- comp_ref_y1 -= pitch;
- }
-
- H += i * (*comp_ref_x0++ - video->intra_pred_topleft);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
-
-
- a_16 = ((*(comp_ref_x + 15) + *(comp_ref_y + 15 * pitch)) << 4) + 16;
- b = (5 * H + 32) >> 6;
- c = (5 * V + 32) >> 6;
-
- tmp = 0;
-
- for (i = 0; i < 16; i++)
- {
- factor_c = a_16 + c * (tmp++ - 7);
-
- factor_c -= 7 * b;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)pred) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 4)) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 8)) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 12)) = temp;
- pred += pred_pitch;
- }
-}
-
-/************** Chroma intra prediction *********************/
-
-void Intra_Chroma_DC(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr)
-{
- int i;
- uint32 temp, temp2, pred_a, pred_b;
- uint8 *comp_ref_x, *comp_ref_y;
- uint8 *comp_ref_cb_x = video->pintra_pred_top_cb;
- uint8 *comp_ref_cb_y = video->pintra_pred_left_cb;
- uint8 *comp_ref_cr_x = video->pintra_pred_top_cr;
- uint8 *comp_ref_cr_y = video->pintra_pred_left_cr;
- int component, j;
- int sum_x0, sum_x1, sum_y0, sum_y1;
- int pred_0[2], pred_1[2], pred_2[2], pred_3[2];
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- if (video->intraAvailB & video->intraAvailA)
- {
- comp_ref_x = comp_ref_cb_x;
- comp_ref_y = comp_ref_cb_y;
- for (i = 0; i < 2; i++)
- {
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x0 = temp & 0xFFFF;
-
- temp = *((uint32*)comp_ref_x);
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x1 = temp & 0xFFFF;
-
- pred_1[i] = (sum_x1 + 2) >> 2;
-
- sum_y0 = *comp_ref_y;
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
-
- sum_y1 = *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
-
- pred_2[i] = (sum_y1 + 2) >> 2;
-
- pred_0[i] = (sum_y0 + sum_x0 + 4) >> 3;
- pred_3[i] = (sum_y1 + sum_x1 + 4) >> 3;
-
- comp_ref_x = comp_ref_cr_x;
- comp_ref_y = comp_ref_cr_y;
- }
- }
-
- else if (video->intraAvailA)
- {
- comp_ref_y = comp_ref_cb_y;
- for (i = 0; i < 2; i++)
- {
- sum_y0 = *comp_ref_y;
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
- sum_y0 += *(comp_ref_y += pitch);
-
- sum_y1 = *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
- sum_y1 += *(comp_ref_y += pitch);
-
- pred_0[i] = pred_1[i] = (sum_y0 + 2) >> 2;
- pred_2[i] = pred_3[i] = (sum_y1 + 2) >> 2;
- comp_ref_y = comp_ref_cr_y;
- }
- }
- else if (video->intraAvailB)
- {
- comp_ref_x = comp_ref_cb_x;
- for (i = 0; i < 2; i++)
- {
- temp = *((uint32*)comp_ref_x);
- comp_ref_x += 4;
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x0 = temp & 0xFFFF;
-
- temp = *((uint32*)comp_ref_x);
- temp2 = (temp >> 8) & 0xFF00FF;
- temp &= 0xFF00FF;
- temp += temp2;
- temp += (temp >> 16);
- sum_x1 = temp & 0xFFFF;
-
- pred_0[i] = pred_2[i] = (sum_x0 + 2) >> 2;
- pred_1[i] = pred_3[i] = (sum_x1 + 2) >> 2;
- comp_ref_x = comp_ref_cr_x;
- }
- }
- else
- {
- pred_0[0] = pred_0[1] = pred_1[0] = pred_1[1] =
- pred_2[0] = pred_2[1] = pred_3[0] = pred_3[1] = 128;
- }
-
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- pred_a = pred_0[component];
- pred_b = pred_1[component];
- pred_a |= (pred_a << 8);
- pred_a |= (pred_a << 16);
- pred_b |= (pred_b << 8);
- pred_b |= (pred_b << 16);
-
- for (i = 4; i < 6; i++)
- {
- for (j = 0; j < 4; j++) /* 4 lines */
- {
- *((uint32*)pred) = pred_a;
- *((uint32*)(pred + 4)) = pred_b;
- pred += pred_pitch; /* move to the next line */
- }
- pred_a = pred_2[component];
- pred_b = pred_3[component];
- pred_a |= (pred_a << 8);
- pred_a |= (pred_a << 16);
- pred_b |= (pred_b << 8);
- pred_b |= (pred_b << 16);
- }
- pred = predCr; /* point to cr */
- }
-}
-
-void Intra_Chroma_Horizontal(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr)
-{
- int i;
- uint32 temp;
- uint8 *comp_ref_cb_y = video->pintra_pred_left_cb;
- uint8 *comp_ref_cr_y = video->pintra_pred_left_cr;
- uint8 *comp;
- int component, j;
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- comp = comp_ref_cb_y;
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- for (i = 4; i < 6; i++)
- {
- for (j = 0; j < 4; j++)
- {
- temp = *comp;
- comp += pitch;
- temp |= (temp << 8);
- temp |= (temp << 16);
- *((uint32*)pred) = temp;
- *((uint32*)(pred + 4)) = temp;
- pred += pred_pitch;
- }
- }
- comp = comp_ref_cr_y;
- pred = predCr; /* point to cr */
- }
-
-}
-
-void Intra_Chroma_Vertical(AVCCommonObj *video, uint8 *predCb, uint8 *predCr)
-{
- uint32 temp1, temp2;
- uint8 *comp_ref_cb_x = video->pintra_pred_top_cb;
- uint8 *comp_ref_cr_x = video->pintra_pred_top_cr;
- uint8 *comp_ref;
- int component, j;
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- comp_ref = comp_ref_cb_x;
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- temp1 = *((uint32*)comp_ref);
- temp2 = *((uint32*)(comp_ref + 4));
- for (j = 0; j < 8; j++)
- {
- *((uint32*)pred) = temp1;
- *((uint32*)(pred + 4)) = temp2;
- pred += pred_pitch;
- }
- comp_ref = comp_ref_cr_x;
- pred = predCr; /* point to cr */
- }
-
-}
-
-void Intra_Chroma_Plane(AVCCommonObj *video, int pitch, uint8 *predCb, uint8 *predCr)
-{
- int i;
- int a_16_C[2], b_C[2], c_C[2], a_16, b, c, factor_c;
- uint8 *comp_ref_x, *comp_ref_y, *comp_ref_x0, *comp_ref_x1, *comp_ref_y0, *comp_ref_y1;
- int component, j;
- int H, V, tmp;
- uint32 temp;
- uint8 byte1, byte2, byte3;
- int value;
- uint8 topleft;
- int pred_pitch = video->pred_pitch;
- uint8 *pred;
-
- comp_ref_x = video->pintra_pred_top_cb;
- comp_ref_y = video->pintra_pred_left_cb;
- topleft = video->intra_pred_topleft_cb;
-
- for (component = 0; component < 2; component++)
- {
- H = V = 0;
- comp_ref_x0 = comp_ref_x + 4;
- comp_ref_x1 = comp_ref_x + 2;
- comp_ref_y0 = comp_ref_y + (pitch << 2);
- comp_ref_y1 = comp_ref_y + (pitch << 1);
- for (i = 1; i < 4; i++)
- {
- H += i * (*comp_ref_x0++ - *comp_ref_x1--);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
- comp_ref_y0 += pitch;
- comp_ref_y1 -= pitch;
- }
- H += i * (*comp_ref_x0++ - topleft);
- V += i * (*comp_ref_y0 - *comp_ref_y1);
-
- a_16_C[component] = ((*(comp_ref_x + 7) + *(comp_ref_y + 7 * pitch)) << 4) + 16;
- b_C[component] = (17 * H + 16) >> 5;
- c_C[component] = (17 * V + 16) >> 5;
-
- comp_ref_x = video->pintra_pred_top_cr;
- comp_ref_y = video->pintra_pred_left_cr;
- topleft = video->intra_pred_topleft_cr;
- }
-
- pred = predCb;
- for (component = 0; component < 2; component++)
- {
- a_16 = a_16_C[component];
- b = b_C[component];
- c = c_C[component];
- tmp = 0;
- for (i = 4; i < 6; i++)
- {
- for (j = 0; j < 4; j++)
- {
- factor_c = a_16 + c * (tmp++ - 3);
-
- factor_c -= 3 * b;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)pred) = temp;
-
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte1 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte2 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- byte3 = value;
- value = factor_c >> 5;
- factor_c += b;
- CLIP_RESULT(value)
- temp = byte1 | (byte2 << 8);
- temp |= (byte3 << 16);
- temp |= (value << 24);
- *((uint32*)(pred + 4)) = temp;
- pred += pred_pitch;
- }
- }
- pred = predCr; /* point to cr */
- }
-}
-
diff --git a/media/libstagefright/codecs/avc/dec/src/residual.cpp b/media/libstagefright/codecs/avc/dec/src/residual.cpp
deleted file mode 100644
index c68550d..0000000
--- a/media/libstagefright/codecs/avc/dec/src/residual.cpp
+++ /dev/null
@@ -1,523 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-
-#include <string.h>
-
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-AVCDec_Status DecodeIntraPCM(AVCCommonObj *video, AVCDecBitstream *stream)
-{
- AVCDec_Status status;
- int j;
- int mb_x, mb_y, offset1;
- uint8 *pDst;
- uint32 byte0, byte1;
- int pitch;
-
- mb_x = video->mb_x;
- mb_y = video->mb_y;
-
-#ifdef USE_PRED_BLOCK
- pDst = video->pred_block + 84;
- pitch = 20;
-#else
- offset1 = (mb_x << 4) + (mb_y << 4) * video->PicWidthInSamplesL;
- pDst = video->currPic->Sl + offset1;
- pitch = video->currPic->pitch;
-#endif
-
- /* at this point bitstream is byte-aligned */
- j = 16;
- while (j > 0)
- {
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)pDst) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 4)) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 8)) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 12)) = byte0;
- j--;
- pDst += pitch;
-
- if (status != AVCDEC_SUCCESS) /* check only once per line */
- return status;
- }
-
-#ifdef USE_PRED_BLOCK
- pDst = video->pred_block + 452;
- pitch = 12;
-#else
- offset1 = (offset1 >> 2) + (mb_x << 2);
- pDst = video->currPic->Scb + offset1;
- pitch >>= 1;
-#endif
-
- j = 8;
- while (j > 0)
- {
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)pDst) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 4)) = byte0;
-
- j--;
- pDst += pitch;
-
- if (status != AVCDEC_SUCCESS) /* check only once per line */
- return status;
- }
-
-#ifdef USE_PRED_BLOCK
- pDst = video->pred_block + 596;
- pitch = 12;
-#else
- pDst = video->currPic->Scr + offset1;
-#endif
- j = 8;
- while (j > 0)
- {
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)pDst) = byte0;
-
- status = BitstreamReadBits(stream, 8, (uint*) & byte0);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 8);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 16);
- status = BitstreamReadBits(stream, 8, (uint*) & byte1);
- byte0 |= (byte1 << 24);
- *((uint32*)(pDst + 4)) = byte0;
-
- j--;
- pDst += pitch;
-
- if (status != AVCDEC_SUCCESS) /* check only once per line */
- return status;
- }
-
-#ifdef MB_BASED_DEBLOCK
- SaveNeighborForIntraPred(video, offset1);
-#endif
-
- return AVCDEC_SUCCESS;
-}
-
-
-
-/* see subclause 7.3.5.3 and readCBPandCoeffsFromNAL() in JM*/
-AVCDec_Status residual(AVCDecObject *decvid, AVCMacroblock *currMB)
-{
- AVCCommonObj *video = decvid->common;
- int16 *block;
- int level[16], run[16], numcoeff; /* output from residual_block_cavlc */
- int block_x, i, j, k, idx, iCbCr;
- int mbPartIdx, subMbPartIdx, mbPartIdx_X, mbPartIdx_Y;
- int nC, maxNumCoeff = 16;
- int coeffNum, start_scan = 0;
- uint8 *zz_scan;
- int Rq, Qq;
- uint32 cbp4x4 = 0;
-
- /* in 8.5.4, it only says if it's field macroblock. */
-
- zz_scan = (uint8*) ZZ_SCAN_BLOCK;
-
-
- /* see 8.5.8 for the initialization of these values */
- Qq = video->QPy_div_6;
- Rq = video->QPy_mod_6;
-
- memset(video->block, 0, sizeof(int16)*NUM_PIXELS_IN_MB);
-
- if (currMB->mbMode == AVC_I16)
- {
- nC = predict_nnz(video, 0, 0);
- decvid->residual_block(decvid, nC, 16, level, run, &numcoeff);
- /* then performs zigzag and transform */
- block = video->block;
- coeffNum = -1;
- for (i = numcoeff - 1; i >= 0; i--)
- {
- coeffNum += run[i] + 1;
- if (coeffNum > 15)
- {
- return AVCDEC_FAIL;
- }
- idx = zz_scan[coeffNum] << 2;
- /* idx = ((idx>>2)<<6) + ((idx&3)<<2); */
- block[idx] = level[i];
- }
-
- /* inverse transform on Intra16x16DCLevel */
- if (numcoeff)
- {
- Intra16DCTrans(block, Qq, Rq);
- cbp4x4 = 0xFFFF;
- }
- maxNumCoeff = 15;
- start_scan = 1;
- }
-
- memset(currMB->nz_coeff, 0, sizeof(uint8)*24);
-
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- mbPartIdx_X = (mbPartIdx & 1) << 1;
- mbPartIdx_Y = mbPartIdx & -2;
-
- if (currMB->CBP&(1 << mbPartIdx))
- {
- for (subMbPartIdx = 0; subMbPartIdx < 4; subMbPartIdx++)
- {
- i = mbPartIdx_X + (subMbPartIdx & 1); // check this
- j = mbPartIdx_Y + (subMbPartIdx >> 1);
- block = video->block + (j << 6) + (i << 2); //
- nC = predict_nnz(video, i, j);
- decvid->residual_block(decvid, nC, maxNumCoeff, level, run, &numcoeff);
-
- /* convert to raster scan and quantize*/
- /* Note: for P mb in SP slice and SI mb in SI slice,
- the quantization cannot be done here.
- block[idx] should be assigned with level[k].
- itrans will be done after the prediction.
- There will be transformation on the predicted value,
- then addition with block[idx], then this quantization
- and transform.*/
-
- coeffNum = -1 + start_scan;
- for (k = numcoeff - 1; k >= 0; k--)
- {
- coeffNum += run[k] + 1;
- if (coeffNum > 15)
- {
- return AVCDEC_FAIL;
- }
- idx = zz_scan[coeffNum];
- block[idx] = (level[k] * dequant_coefres[Rq][coeffNum]) << Qq ;
- }
-
- currMB->nz_coeff[(j<<2)+i] = numcoeff;
- if (numcoeff)
- {
- cbp4x4 |= (1 << ((j << 2) + i));
- }
- }
- }
- }
-
- Qq = video->QPc_div_6;
- Rq = video->QPc_mod_6;
-
- if (currMB->CBP & (3 << 4)) /* chroma DC residual present */
- {
- for (iCbCr = 0; iCbCr < 2; iCbCr++)
- {
- decvid->residual_block(decvid, -1, 4, level, run, &numcoeff);
- block = video->block + 256 + (iCbCr << 3);
- coeffNum = -1;
- for (i = numcoeff - 1; i >= 0; i--)
- {
- coeffNum += run[i] + 1;
- if (coeffNum > 3)
- {
- return AVCDEC_FAIL;
- }
- block[(coeffNum>>1)*64 + (coeffNum&1)*4] = level[i];
- }
- /* inverse transform on chroma DC */
- /* for P in SP and SI in SI, this function can't be done here,
- must do prediction transform/quant first. */
- if (numcoeff)
- {
- ChromaDCTrans(block, Qq, Rq);
- cbp4x4 |= (iCbCr ? 0xcc0000 : 0x330000);
- }
- }
- }
-
- if (currMB->CBP & (2 << 4))
- {
- for (block_x = 0; block_x < 4; block_x += 2) /* for iCbCr */
- {
- for (j = 4; j < 6; j++) /* for each block inside Cb or Cr */
- {
- for (i = block_x; i < block_x + 2; i++)
- {
-
- block = video->block + (j << 6) + (i << 2);
-
- nC = predict_nnz_chroma(video, i, j);
- decvid->residual_block(decvid, nC, 15, level, run, &numcoeff);
-
- /* convert to raster scan and quantize */
- /* for P MB in SP slice and SI MB in SI slice,
- the dequant and transform cannot be done here.
- It needs the prediction values. */
- coeffNum = 0;
- for (k = numcoeff - 1; k >= 0; k--)
- {
- coeffNum += run[k] + 1;
- if (coeffNum > 15)
- {
- return AVCDEC_FAIL;
- }
- idx = zz_scan[coeffNum];
- block[idx] = (level[k] * dequant_coefres[Rq][coeffNum]) << Qq;
- }
-
-
- /* then transform */
- // itrans(block); /* transform */
- currMB->nz_coeff[(j<<2)+i] = numcoeff; //
- if (numcoeff)
- {
- cbp4x4 |= (1 << ((j << 2) + i));
- }
- }
-
- }
- }
- }
-
- video->cbp4x4 = cbp4x4;
-
- return AVCDEC_SUCCESS;
-}
-
-/* see subclause 7.3.5.3.1 and 9.2 and readCoeff4x4_CAVLC() in JM */
-AVCDec_Status residual_block_cavlc(AVCDecObject *decvid, int nC, int maxNumCoeff,
- int *level, int *run, int *numcoeff)
-{
- int i, j;
- int TrailingOnes, TotalCoeff;
- AVCDecBitstream *stream = decvid->bitstream;
- int suffixLength;
- uint trailing_ones_sign_flag, level_prefix, level_suffix;
- int levelCode, levelSuffixSize, zerosLeft;
- int run_before;
-
-
- if (nC >= 0)
- {
- ce_TotalCoeffTrailingOnes(stream, &TrailingOnes, &TotalCoeff, nC);
- }
- else
- {
- ce_TotalCoeffTrailingOnesChromaDC(stream, &TrailingOnes, &TotalCoeff);
- }
-
- *numcoeff = TotalCoeff;
-
- /* This part is done quite differently in ReadCoef4x4_CAVLC() */
- if (TotalCoeff == 0)
- {
- return AVCDEC_SUCCESS;
- }
-
- if (TrailingOnes) /* keep reading the sign of those trailing ones */
- {
- /* instead of reading one bit at a time, read the whole thing at once */
- BitstreamReadBits(stream, TrailingOnes, &trailing_ones_sign_flag);
- trailing_ones_sign_flag <<= 1;
- for (i = 0; i < TrailingOnes; i++)
- {
- level[i] = 1 - ((trailing_ones_sign_flag >> (TrailingOnes - i - 1)) & 2);
- }
- }
-
- i = TrailingOnes;
- suffixLength = 1;
- if (TotalCoeff > TrailingOnes)
- {
- ce_LevelPrefix(stream, &level_prefix);
- if (TotalCoeff < 11 || TrailingOnes == 3)
- {
- if (level_prefix < 14)
- {
-// levelSuffixSize = 0;
- levelCode = level_prefix;
- }
- else if (level_prefix == 14)
- {
-// levelSuffixSize = 4;
- BitstreamReadBits(stream, 4, &level_suffix);
- levelCode = 14 + level_suffix;
- }
- else /* if (level_prefix == 15) */
- {
-// levelSuffixSize = 12;
- BitstreamReadBits(stream, 12, &level_suffix);
- levelCode = 30 + level_suffix;
- }
- }
- else
- {
- /* suffixLength = 1; */
- if (level_prefix < 15)
- {
- levelSuffixSize = suffixLength;
- }
- else
- {
- levelSuffixSize = 12;
- }
- BitstreamReadBits(stream, levelSuffixSize, &level_suffix);
-
- levelCode = (level_prefix << 1) + level_suffix;
- }
-
- if (TrailingOnes < 3)
- {
- levelCode += 2;
- }
-
- level[i] = (levelCode + 2) >> 1;
- if (level[i] > 3)
- {
- suffixLength = 2;
- }
-
- if (levelCode & 1)
- {
- level[i] = -level[i];
- }
- i++;
-
- }
-
- for (j = TotalCoeff - i; j > 0 ; j--)
- {
- ce_LevelPrefix(stream, &level_prefix);
- if (level_prefix < 15)
- {
- levelSuffixSize = suffixLength;
- }
- else
- {
- levelSuffixSize = 12;
- }
- BitstreamReadBits(stream, levelSuffixSize, &level_suffix);
-
- levelCode = (level_prefix << suffixLength) + level_suffix;
- level[i] = (levelCode >> 1) + 1;
- if (level[i] > (3 << (suffixLength - 1)) && suffixLength < 6)
- {
- suffixLength++;
- }
- if (levelCode & 1)
- {
- level[i] = -level[i];
- }
- i++;
- }
-
-
- if (TotalCoeff < maxNumCoeff)
- {
- if (nC >= 0)
- {
- ce_TotalZeros(stream, &zerosLeft, TotalCoeff);
- }
- else
- {
- ce_TotalZerosChromaDC(stream, &zerosLeft, TotalCoeff);
- }
- }
- else
- {
- zerosLeft = 0;
- }
-
- for (i = 0; i < TotalCoeff - 1; i++)
- {
- if (zerosLeft > 0)
- {
- ce_RunBefore(stream, &run_before, zerosLeft);
- run[i] = run_before;
- }
- else
- {
- run[i] = 0;
- zerosLeft = 0; // could be negative under error conditions
- }
-
- zerosLeft = zerosLeft - run[i];
- }
-
- if (zerosLeft < 0)
- {
- zerosLeft = 0;
-// return AVCDEC_FAIL;
- }
-
- run[TotalCoeff-1] = zerosLeft;
-
- /* leave the inverse zigzag scan part for the caller */
-
-
- return AVCDEC_SUCCESS;
-}
diff --git a/media/libstagefright/codecs/avc/dec/src/slice.cpp b/media/libstagefright/codecs/avc/dec/src/slice.cpp
deleted file mode 100644
index 7a2ef3d..0000000
--- a/media/libstagefright/codecs/avc/dec/src/slice.cpp
+++ /dev/null
@@ -1,772 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-/* Note for optimization: syntax decoding or operations related to B_SLICE should be
-commented out by macro definition or function pointers. */
-
-#include <string.h>
-
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-const static int mbPart2raster[3][4] = {{0, 0, 0, 0}, {1, 1, 0, 0}, {1, 0, 1, 0}};
-/* decode_frame_slice() */
-/* decode_one_slice() */
-AVCDec_Status DecodeSlice(AVCDecObject *decvid)
-{
- AVCDec_Status status;
- AVCCommonObj *video = decvid->common;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- AVCMacroblock *currMB ;
- AVCDecBitstream *stream = decvid->bitstream;
- uint slice_group_id;
- uint CurrMbAddr, moreDataFlag;
-
- /* set the first mb in slice */
- CurrMbAddr = sliceHdr->first_mb_in_slice;
- slice_group_id = video->MbToSliceGroupMap[CurrMbAddr];
-
- if ((CurrMbAddr && (CurrMbAddr != (uint)(video->mbNum + 1))) && video->currSeqParams->constrained_set1_flag == 1)
- {
- ConcealSlice(decvid, video->mbNum, CurrMbAddr);
- }
-
- moreDataFlag = 1;
- video->mb_skip_run = -1;
-
-
- /* while loop , see subclause 7.3.4 */
- do
- {
- if (CurrMbAddr >= video->PicSizeInMbs)
- {
- return AVCDEC_FAIL;
- }
-
- currMB = video->currMB = &(video->mblock[CurrMbAddr]);
- video->mbNum = CurrMbAddr;
- currMB->slice_id = video->slice_id; // slice
-
- /* we can remove this check if we don't support Mbaff. */
- /* we can wrap below into an initMB() function which will also
- do necessary reset of macroblock related parameters. */
-
- video->mb_x = CurrMbAddr % video->PicWidthInMbs;
- video->mb_y = CurrMbAddr / video->PicWidthInMbs;
-
- /* check the availability of neighboring macroblocks */
- InitNeighborAvailability(video, CurrMbAddr);
-
- /* read_macroblock and decode_one_macroblock() */
- status = DecodeMB(decvid);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-#ifdef MB_BASED_DEBLOCK
- if (video->currPicParams->num_slice_groups_minus1 == 0)
- {
- MBInLoopDeblock(video); /* MB-based deblocking */
- }
- else /* this mode cannot be used if the number of slice group is not one. */
- {
- return AVCDEC_FAIL;
- }
-#endif
- video->numMBs--;
-
- moreDataFlag = more_rbsp_data(stream);
-
-
- /* go to next MB */
- while (++CurrMbAddr < video->PicSizeInMbs && video->MbToSliceGroupMap[CurrMbAddr] != (int)slice_group_id)
- {
- }
-
- }
- while ((moreDataFlag && video->numMBs > 0) || video->mb_skip_run > 0); /* even if no more data, but last few MBs are skipped */
-
- if (video->numMBs == 0)
- {
- video->newPic = TRUE;
- video->mbNum = 0; // _Conceal
- return AVCDEC_PICTURE_READY;
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* read MB mode and motion vectors */
-/* perform Intra/Inter prediction and residue */
-/* update video->mb_skip_run */
-AVCDec_Status DecodeMB(AVCDecObject *decvid)
-{
- AVCDec_Status status;
- AVCCommonObj *video = decvid->common;
- AVCDecBitstream *stream = decvid->bitstream;
- AVCMacroblock *currMB = video->currMB;
- uint mb_type;
- int slice_type = video->slice_type;
- int temp;
-
- currMB->QPy = video->QPy;
- currMB->QPc = video->QPc;
-
- if (slice_type == AVC_P_SLICE)
- {
- if (video->mb_skip_run < 0)
- {
- ue_v(stream, (uint *)&(video->mb_skip_run));
- }
-
- if (video->mb_skip_run == 0)
- {
- /* this will not handle the case where the slice ends with a mb_skip_run == 0 and no following MB data */
- ue_v(stream, &mb_type);
- if (mb_type > 30)
- {
- return AVCDEC_FAIL;
- }
- InterpretMBModeP(currMB, mb_type);
- video->mb_skip_run = -1;
- }
- else
- {
- /* see subclause 7.4.4 for more details on how
- mb_field_decoding_flag is derived in case of skipped MB */
-
- currMB->mb_intra = FALSE;
-
- currMB->mbMode = AVC_SKIP;
- currMB->MbPartWidth = currMB->MbPartHeight = 16;
- currMB->NumMbPart = 1;
- currMB->NumSubMbPart[0] = currMB->NumSubMbPart[1] =
- currMB->NumSubMbPart[2] = currMB->NumSubMbPart[3] = 1; //
- currMB->SubMbPartWidth[0] = currMB->SubMbPartWidth[1] =
- currMB->SubMbPartWidth[2] = currMB->SubMbPartWidth[3] = currMB->MbPartWidth;
- currMB->SubMbPartHeight[0] = currMB->SubMbPartHeight[1] =
- currMB->SubMbPartHeight[2] = currMB->SubMbPartHeight[3] = currMB->MbPartHeight;
-
- memset(currMB->nz_coeff, 0, sizeof(uint8)*NUM_BLKS_IN_MB);
-
- currMB->CBP = 0;
- video->cbp4x4 = 0;
- /* for skipped MB, always look at the first entry in RefPicList */
- currMB->RefIdx[0] = currMB->RefIdx[1] =
- currMB->RefIdx[2] = currMB->RefIdx[3] = video->RefPicList0[0]->RefIdx;
- InterMBPrediction(video);
- video->mb_skip_run--;
- return AVCDEC_SUCCESS;
- }
-
- }
- else
- {
- /* Then decode mode and MV */
- ue_v(stream, &mb_type);
- if (mb_type > 25)
- {
- return AVCDEC_FAIL;
- }
- InterpretMBModeI(currMB, mb_type);
- }
-
-
- if (currMB->mbMode != AVC_I_PCM)
- {
-
- if (currMB->mbMode == AVC_P8 || currMB->mbMode == AVC_P8ref0)
- {
- status = sub_mb_pred(video, currMB, stream);
- }
- else
- {
- status = mb_pred(video, currMB, stream) ;
- }
-
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
-
- if (currMB->mbMode != AVC_I16)
- {
- /* decode coded_block_pattern */
- status = DecodeCBP(currMB, stream);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- }
-
- if (currMB->CBP > 0 || currMB->mbMode == AVC_I16)
- {
- se_v(stream, &temp);
- if (temp)
- {
- temp += (video->QPy + 52);
- currMB->QPy = video->QPy = temp - 52 * (temp * 79 >> 12);
- if (currMB->QPy > 51 || currMB->QPy < 0)
- {
- video->QPy = AVC_CLIP3(0, 51, video->QPy);
-// return AVCDEC_FAIL;
- }
- video->QPy_div_6 = (video->QPy * 43) >> 8;
- video->QPy_mod_6 = video->QPy - 6 * video->QPy_div_6;
- currMB->QPc = video->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->QPy + video->currPicParams->chroma_qp_index_offset)];
- video->QPc_div_6 = (video->QPc * 43) >> 8;
- video->QPc_mod_6 = video->QPc - 6 * video->QPc_div_6;
- }
- }
- /* decode residue and inverse transform */
- status = residual(decvid, currMB);
- if (status != AVCDEC_SUCCESS)
- {
- return status;
- }
- }
- else
- {
- if (stream->bitcnt & 7)
- {
- BitstreamByteAlign(stream);
- }
- /* decode pcm_byte[i] */
- DecodeIntraPCM(video, stream);
-
- currMB->QPy = 0; /* necessary for deblocking */ // _OPTIMIZE
- currMB->QPc = mapQPi2QPc[AVC_CLIP3(0, 51, video->currPicParams->chroma_qp_index_offset)];
-
- /* default values, don't know if really needed */
- currMB->CBP = 0x3F;
- video->cbp4x4 = 0xFFFF;
- currMB->mb_intra = TRUE;
- memset(currMB->nz_coeff, 16, sizeof(uint8)*NUM_BLKS_IN_MB);
- return AVCDEC_SUCCESS;
- }
-
-
- /* do Intra/Inter prediction, together with the residue compensation */
- /* This part should be common between the skip and no-skip */
- if (currMB->mbMode == AVC_I4 || currMB->mbMode == AVC_I16)
- {
- IntraMBPrediction(video);
- }
- else
- {
- InterMBPrediction(video);
- }
-
-
-
- return AVCDEC_SUCCESS;
-}
-
-/* see subclause 7.3.5.1 */
-AVCDec_Status mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- int mbPartIdx;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- uint max_ref_idx;
- const int *temp_0;
- int16 *temp_1;
- uint code;
-
- if (currMB->mbMode == AVC_I4 || currMB->mbMode == AVC_I16)
- {
-
- video->intraAvailA = video->intraAvailB = video->intraAvailC = video->intraAvailD = 0;
-
- if (!video->currPicParams->constrained_intra_pred_flag)
- {
- video->intraAvailA = video->mbAvailA;
- video->intraAvailB = video->mbAvailB;
- video->intraAvailC = video->mbAvailC;
- video->intraAvailD = video->mbAvailD;
- }
- else
- {
- if (video->mbAvailA)
- {
- video->intraAvailA = video->mblock[video->mbAddrA].mb_intra;
- }
- if (video->mbAvailB)
- {
- video->intraAvailB = video->mblock[video->mbAddrB].mb_intra ;
- }
- if (video->mbAvailC)
- {
- video->intraAvailC = video->mblock[video->mbAddrC].mb_intra;
- }
- if (video->mbAvailD)
- {
- video->intraAvailD = video->mblock[video->mbAddrD].mb_intra;
- }
- }
-
-
- if (currMB->mbMode == AVC_I4)
- {
- /* perform prediction to get the actual intra 4x4 pred mode */
- DecodeIntra4x4Mode(video, currMB, stream);
- /* output will be in currMB->i4Mode[4][4] */
- }
-
- ue_v(stream, &code);
-
- if (code > 3)
- {
- return AVCDEC_FAIL; /* out of range */
- }
- currMB->intra_chroma_pred_mode = (AVCIntraChromaPredMode)code;
- }
- else
- {
-
- memset(currMB->ref_idx_L0, 0, sizeof(int16)*4);
-
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
-// max_ref_idx = sliceHdr->num_ref_idx_l0_active_minus1;
- max_ref_idx = video->refList0Size - 1;
-
- /* decode ref index for L0 */
- if (sliceHdr->num_ref_idx_l0_active_minus1 > 0)
- {
- for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
- {
- te_v(stream, &code, max_ref_idx);
- if (code > (uint)max_ref_idx)
- {
- return AVCDEC_FAIL;
- }
- currMB->ref_idx_L0[mbPartIdx] = code;
- }
- }
-
- /* populate ref_idx_L0 */
- temp_0 = &mbPart2raster[currMB->mbMode-AVC_P16][0];
- temp_1 = &currMB->ref_idx_L0[3];
-
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
- *temp_1-- = currMB->ref_idx_L0[*temp_0++];
-
- /* Global reference index, these values are used in deblock */
- currMB->RefIdx[0] = video->RefPicList0[currMB->ref_idx_L0[0]]->RefIdx;
- currMB->RefIdx[1] = video->RefPicList0[currMB->ref_idx_L0[1]]->RefIdx;
- currMB->RefIdx[2] = video->RefPicList0[currMB->ref_idx_L0[2]]->RefIdx;
- currMB->RefIdx[3] = video->RefPicList0[currMB->ref_idx_L0[3]]->RefIdx;
-
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
- max_ref_idx = sliceHdr->num_ref_idx_l1_active_minus1;
- /* decode mvd_l0 */
- for (mbPartIdx = 0; mbPartIdx < currMB->NumMbPart; mbPartIdx++)
- {
- se_v(stream, &(video->mvd_l0[mbPartIdx][0][0]));
- se_v(stream, &(video->mvd_l0[mbPartIdx][0][1]));
- }
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/* see subclause 7.3.5.2 */
-AVCDec_Status sub_mb_pred(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- int mbPartIdx, subMbPartIdx;
- AVCSliceHeader *sliceHdr = video->sliceHdr;
- uint max_ref_idx;
- uint sub_mb_type[4];
- uint code;
-
- memset(currMB->ref_idx_L0, 0, sizeof(int16)*4);
-
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- ue_v(stream, &(sub_mb_type[mbPartIdx]));
- if (sub_mb_type[mbPartIdx] > 3)
- {
- return AVCDEC_FAIL;
- }
-
- }
- /* we have to check the values to make sure they are valid */
- /* assign values to currMB->sub_mb_type[], currMB->MBPartPredMode[][x] */
-
- InterpretSubMBModeP(currMB, sub_mb_type);
-
-
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
-// max_ref_idx = sliceHdr->num_ref_idx_l0_active_minus1;
- max_ref_idx = video->refList0Size - 1;
-
- if (sliceHdr->num_ref_idx_l0_active_minus1 > 0 && currMB->mbMode != AVC_P8ref0)
- {
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- te_v(stream, (uint*)&code, max_ref_idx);
- if (code > max_ref_idx)
- {
- return AVCDEC_FAIL;
- }
- currMB->ref_idx_L0[mbPartIdx] = code;
- }
- }
- /* see subclause 7.4.5.1 for the range of ref_idx_lX */
-
- max_ref_idx = sliceHdr->num_ref_idx_l1_active_minus1;
- /* if(video->MbaffFrameFlag && currMB->mb_field_decoding_flag)
- max_ref_idx = 2*sliceHdr->num_ref_idx_l1_active_minus1 + 1;*/
- for (mbPartIdx = 0; mbPartIdx < 4; mbPartIdx++)
- {
- for (subMbPartIdx = 0; subMbPartIdx < currMB->NumSubMbPart[mbPartIdx]; subMbPartIdx++)
- {
- se_v(stream, &(video->mvd_l0[mbPartIdx][subMbPartIdx][0]));
- se_v(stream, &(video->mvd_l0[mbPartIdx][subMbPartIdx][1]));
- }
- /* used in deblocking */
- currMB->RefIdx[mbPartIdx] = video->RefPicList0[currMB->ref_idx_L0[mbPartIdx]]->RefIdx;
- }
- return AVCDEC_SUCCESS;
-}
-
-void InterpretMBModeI(AVCMacroblock *mblock, uint mb_type)
-{
- mblock->NumMbPart = 1;
-
- mblock->mb_intra = TRUE;
-
- if (mb_type == 0) /* I_4x4 */
- {
- mblock->mbMode = AVC_I4;
- }
- else if (mb_type < 25) /* I_PCM */
- {
- mblock->mbMode = AVC_I16;
- mblock->i16Mode = (AVCIntra16x16PredMode)((mb_type - 1) & 0x3);
- if (mb_type > 12)
- {
- mblock->CBP = (((mb_type - 13) >> 2) << 4) + 0x0F;
- }
- else
- {
- mblock->CBP = ((mb_type - 1) >> 2) << 4;
- }
- }
- else
- {
- mblock->mbMode = AVC_I_PCM;
- }
-
- return ;
-}
-
-void InterpretMBModeP(AVCMacroblock *mblock, uint mb_type)
-{
- const static int map2PartWidth[5] = {16, 16, 8, 8, 8};
- const static int map2PartHeight[5] = {16, 8, 16, 8, 8};
- const static int map2NumPart[5] = {1, 2, 2, 4, 4};
- const static AVCMBMode map2mbMode[5] = {AVC_P16, AVC_P16x8, AVC_P8x16, AVC_P8, AVC_P8ref0};
-
- mblock->mb_intra = FALSE;
- if (mb_type < 5)
- {
- mblock->mbMode = map2mbMode[mb_type];
- mblock->MbPartWidth = map2PartWidth[mb_type];
- mblock->MbPartHeight = map2PartHeight[mb_type];
- mblock->NumMbPart = map2NumPart[mb_type];
- mblock->NumSubMbPart[0] = mblock->NumSubMbPart[1] =
- mblock->NumSubMbPart[2] = mblock->NumSubMbPart[3] = 1;
- mblock->SubMbPartWidth[0] = mblock->SubMbPartWidth[1] =
- mblock->SubMbPartWidth[2] = mblock->SubMbPartWidth[3] = mblock->MbPartWidth;
- mblock->SubMbPartHeight[0] = mblock->SubMbPartHeight[1] =
- mblock->SubMbPartHeight[2] = mblock->SubMbPartHeight[3] = mblock->MbPartHeight;
- }
- else
- {
- InterpretMBModeI(mblock, mb_type - 5);
- /* set MV and Ref_Idx codes of Intra blocks in P-slices */
- memset(mblock->mvL0, 0, sizeof(int32)*16);
- mblock->ref_idx_L0[0] = mblock->ref_idx_L0[1] = mblock->ref_idx_L0[2] = mblock->ref_idx_L0[3] = -1;
- }
- return ;
-}
-
-void InterpretMBModeB(AVCMacroblock *mblock, uint mb_type)
-{
- const static int map2PartWidth[23] = {8, 16, 16, 16, 16, 8, 16, 8, 16, 8,
- 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 8
- };
- const static int map2PartHeight[23] = {8, 16, 16, 16, 8, 16, 8, 16, 8,
- 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8, 16, 8
- };
- /* see enum AVCMBType declaration */
- const static AVCMBMode map2mbMode[23] = {AVC_BDirect16, AVC_P16, AVC_P16, AVC_P16,
- AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16,
- AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16,
- AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P16x8, AVC_P8x16, AVC_P8
- };
- const static int map2PredMode1[23] = {3, 0, 1, 2, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 2, 2, 2, 2, 2, 2, -1};
- const static int map2PredMode2[23] = { -1, -1, -1, -1, 0, 0, 1, 1, 1, 1, 0, 0, 2, 2, 2, 2, 0, 0, 1, 1, 2, 2, -1};
- const static int map2NumPart[23] = { -1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4};
-
- mblock->mb_intra = FALSE;
-
- if (mb_type < 23)
- {
- mblock->mbMode = map2mbMode[mb_type];
- mblock->NumMbPart = map2NumPart[mb_type];
- mblock->MBPartPredMode[0][0] = (AVCPredMode)map2PredMode1[mb_type];
- if (mblock->NumMbPart > 1)
- {
- mblock->MBPartPredMode[1][0] = (AVCPredMode)map2PredMode2[mb_type];
- }
- mblock->MbPartWidth = map2PartWidth[mb_type];
- mblock->MbPartHeight = map2PartHeight[mb_type];
- }
- else
- {
- InterpretMBModeI(mblock, mb_type - 23);
- }
-
- return ;
-}
-
-void InterpretMBModeSI(AVCMacroblock *mblock, uint mb_type)
-{
- mblock->mb_intra = TRUE;
-
- if (mb_type == 0)
- {
- mblock->mbMode = AVC_SI4;
- /* other values are N/A */
- }
- else
- {
- InterpretMBModeI(mblock, mb_type - 1);
- }
- return ;
-}
-
-/* input is mblock->sub_mb_type[] */
-void InterpretSubMBModeP(AVCMacroblock *mblock, uint *sub_mb_type)
-{
- int i, sub_type;
- /* see enum AVCMBType declaration */
-// const static AVCSubMBMode map2subMbMode[4] = {AVC_8x8,AVC_8x4,AVC_4x8,AVC_4x4};
- const static int map2subPartWidth[4] = {8, 8, 4, 4};
- const static int map2subPartHeight[4] = {8, 4, 8, 4};
- const static int map2numSubPart[4] = {1, 2, 2, 4};
-
- for (i = 0; i < 4 ; i++)
- {
- sub_type = (int) sub_mb_type[i];
- // mblock->subMbMode[i] = map2subMbMode[sub_type];
- mblock->NumSubMbPart[i] = map2numSubPart[sub_type];
- mblock->SubMbPartWidth[i] = map2subPartWidth[sub_type];
- mblock->SubMbPartHeight[i] = map2subPartHeight[sub_type];
- }
-
- return ;
-}
-
-void InterpretSubMBModeB(AVCMacroblock *mblock, uint *sub_mb_type)
-{
- int i, j, sub_type;
- /* see enum AVCMBType declaration */
- const static AVCSubMBMode map2subMbMode[13] = {AVC_BDirect8, AVC_8x8, AVC_8x8,
- AVC_8x8, AVC_8x4, AVC_4x8, AVC_8x4, AVC_4x8, AVC_8x4, AVC_4x8, AVC_4x4, AVC_4x4, AVC_4x4
- };
- const static int map2subPartWidth[13] = {4, 8, 8, 8, 8, 4, 8, 4, 8, 4, 4, 4, 4};
- const static int map2subPartHeight[13] = {4, 8, 8, 8, 4, 8, 4, 8, 4, 8, 4, 4, 4};
- const static int map2numSubPart[13] = {1, 1, 1, 2, 2, 2, 2, 2, 2, 4, 4, 4};
- const static int map2predMode[13] = {3, 0, 1, 2, 0, 0, 1, 1, 2, 2, 0, 1, 2};
-
- for (i = 0; i < 4 ; i++)
- {
- sub_type = (int) sub_mb_type[i];
- mblock->subMbMode[i] = map2subMbMode[sub_type];
- mblock->NumSubMbPart[i] = map2numSubPart[sub_type];
- mblock->SubMbPartWidth[i] = map2subPartWidth[sub_type];
- mblock->SubMbPartHeight[i] = map2subPartHeight[sub_type];
- for (j = 0; j < 4; j++)
- {
- mblock->MBPartPredMode[i][j] = (AVCPredMode)map2predMode[sub_type];
- }
- }
-
- return ;
-}
-
-/* see subclause 8.3.1 */
-AVCDec_Status DecodeIntra4x4Mode(AVCCommonObj *video, AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- int intra4x4PredModeA = 0, intra4x4PredModeB = 0, predIntra4x4PredMode = 0;
- int component, SubBlock_indx, block_x, block_y;
- int dcOnlyPredictionFlag;
- uint prev_intra4x4_pred_mode_flag[16];
- int rem_intra4x4_pred_mode[16];
- int bindx = 0;
-
- for (component = 0; component < 4; component++) /* partition index */
- {
- block_x = ((component & 1) << 1);
- block_y = ((component >> 1) << 1);
-
- for (SubBlock_indx = 0; SubBlock_indx < 4; SubBlock_indx++) /* sub-partition index */
- {
- BitstreamRead1Bit(stream, &(prev_intra4x4_pred_mode_flag[bindx]));
-
- if (!prev_intra4x4_pred_mode_flag[bindx])
- {
- BitstreamReadBits(stream, 3, (uint*)&(rem_intra4x4_pred_mode[bindx]));
- }
-
- dcOnlyPredictionFlag = 0;
- if (block_x > 0)
- {
- intra4x4PredModeA = currMB->i4Mode[(block_y << 2) + block_x - 1 ];
- }
- else
- {
- if (video->intraAvailA)
- {
- if (video->mblock[video->mbAddrA].mbMode == AVC_I4)
- {
- intra4x4PredModeA = video->mblock[video->mbAddrA].i4Mode[(block_y << 2) + 3];
- }
- else
- {
- intra4x4PredModeA = AVC_I4_DC;
- }
- }
- else
- {
- dcOnlyPredictionFlag = 1;
- }
- }
-
- if (block_y > 0)
- {
- intra4x4PredModeB = currMB->i4Mode[((block_y-1) << 2) + block_x];
- }
- else
- {
- if (video->intraAvailB)
- {
- if (video->mblock[video->mbAddrB].mbMode == AVC_I4)
- {
- intra4x4PredModeB = video->mblock[video->mbAddrB].i4Mode[(3 << 2) + block_x];
- }
- else
- {
- intra4x4PredModeB = AVC_I4_DC;
- }
- }
- else
- {
- dcOnlyPredictionFlag = 1;
- }
- }
-
- if (dcOnlyPredictionFlag)
- {
- intra4x4PredModeA = intra4x4PredModeB = AVC_I4_DC;
- }
-
- predIntra4x4PredMode = AVC_MIN(intra4x4PredModeA, intra4x4PredModeB);
- if (prev_intra4x4_pred_mode_flag[bindx])
- {
- currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)predIntra4x4PredMode;
- }
- else
- {
- if (rem_intra4x4_pred_mode[bindx] < predIntra4x4PredMode)
- {
- currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)rem_intra4x4_pred_mode[bindx];
- }
- else
- {
- currMB->i4Mode[(block_y<<2)+block_x] = (AVCIntra4x4PredMode)(rem_intra4x4_pred_mode[bindx] + 1);
- }
- }
- bindx++;
- block_y += (SubBlock_indx & 1) ;
- block_x += (1 - 2 * (SubBlock_indx & 1)) ;
- }
- }
- return AVCDEC_SUCCESS;
-}
-AVCDec_Status ConcealSlice(AVCDecObject *decvid, int mbnum_start, int mbnum_end)
-{
- AVCCommonObj *video = decvid->common;
- AVCMacroblock *currMB ;
-
- int CurrMbAddr;
-
- if (video->RefPicList0[0] == NULL)
- {
- return AVCDEC_FAIL;
- }
-
- for (CurrMbAddr = mbnum_start; CurrMbAddr < mbnum_end; CurrMbAddr++)
- {
- currMB = video->currMB = &(video->mblock[CurrMbAddr]);
- video->mbNum = CurrMbAddr;
- currMB->slice_id = video->slice_id++; // slice
-
- /* we can remove this check if we don't support Mbaff. */
- /* we can wrap below into an initMB() function which will also
- do necessary reset of macroblock related parameters. */
-
- video->mb_x = CurrMbAddr % video->PicWidthInMbs;
- video->mb_y = CurrMbAddr / video->PicWidthInMbs;
-
- /* check the availability of neighboring macroblocks */
- InitNeighborAvailability(video, CurrMbAddr);
-
- currMB->mb_intra = FALSE;
-
- currMB->mbMode = AVC_SKIP;
- currMB->MbPartWidth = currMB->MbPartHeight = 16;
-
- currMB->NumMbPart = 1;
- currMB->NumSubMbPart[0] = currMB->NumSubMbPart[1] =
- currMB->NumSubMbPart[2] = currMB->NumSubMbPart[3] = 1;
- currMB->SubMbPartWidth[0] = currMB->SubMbPartWidth[1] =
- currMB->SubMbPartWidth[2] = currMB->SubMbPartWidth[3] = currMB->MbPartWidth;
- currMB->SubMbPartHeight[0] = currMB->SubMbPartHeight[1] =
- currMB->SubMbPartHeight[2] = currMB->SubMbPartHeight[3] = currMB->MbPartHeight;
- currMB->QPy = 26;
- currMB->QPc = 26;
- memset(currMB->nz_coeff, 0, sizeof(uint8)*NUM_BLKS_IN_MB);
-
- currMB->CBP = 0;
- video->cbp4x4 = 0;
- /* for skipped MB, always look at the first entry in RefPicList */
- currMB->RefIdx[0] = currMB->RefIdx[1] =
- currMB->RefIdx[2] = currMB->RefIdx[3] = video->RefPicList0[0]->RefIdx;
- InterMBPrediction(video);
-
- video->numMBs--;
-
- }
-
- return AVCDEC_SUCCESS;
-}
-
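The QP update in the removed DecodeMB() above implements the spec's (QPy,prev + mb_qp_delta + 52) % 52 wrap with a multiply-and-shift: for the values a conforming stream can produce there, (temp * 79) >> 12 equals temp / 52. A small self-checking sketch of that identity (standalone, not part of the decoder sources):

    #include <cassert>
    #include <cstdio>

    int main() {
        // mb_qp_delta lies in [-26, 25] and QPy in [0, 51], so
        // temp = QPy + mb_qp_delta + 52 stays within [26, 128]; over that
        // range (temp * 79) >> 12 is exactly temp / 52.
        for (int temp = 0; temp <= 128; ++temp) {
            assert(((temp * 79) >> 12) == temp / 52);
        }
        // Example: QPy = 40, mb_qp_delta = +20 -> temp = 112,
        // new QPy = 112 - 52 * 2 = 8, the same as (40 + 20 + 52) % 52.
        int temp = 40 + 20 + 52;
        printf("new QPy = %d\n", temp - 52 * ((temp * 79) >> 12));
        return 0;
    }
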
diff --git a/media/libstagefright/codecs/avc/dec/src/vlc.cpp b/media/libstagefright/codecs/avc/dec/src/vlc.cpp
deleted file mode 100644
index f531249..0000000
--- a/media/libstagefright/codecs/avc/dec/src/vlc.cpp
+++ /dev/null
@@ -1,815 +0,0 @@
-/* ------------------------------------------------------------------
- * Copyright (C) 1998-2009 PacketVideo
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
- * express or implied.
- * See the License for the specific language governing permissions
- * and limitations under the License.
- * -------------------------------------------------------------------
- */
-#include "avcdec_lib.h"
-#include "avcdec_bitstream.h"
-
-//#define PV_ARM_V5
-#ifdef PV_ARM_V5
-#define PV_CLZ(A,B) __asm{CLZ (A),(B)} \
- A -= 16;
-#else
-#define PV_CLZ(A,B) while (((B) & 0x8000) == 0) {(B) <<=1; A++;}
-#endif
-
-
-#define PV_NO_CLZ
-
-#ifndef PV_NO_CLZ
-typedef struct tagVLCNumCoeffTrail
-{
- int trailing;
- int total_coeff;
- int length;
-} VLCNumCoeffTrail;
-
-typedef struct tagShiftOffset
-{
- int shift;
- int offset;
-} ShiftOffset;
-
-const VLCNumCoeffTrail NumCoeffTrailOnes[3][67] =
-{
- {{0, 0, 1}, {1, 1, 2}, {2, 2, 3}, {1, 2, 6}, {0, 1, 6}, {3, 3, 5}, {3, 3, 5}, {3, 5, 7},
- {2, 3, 7}, {3, 4, 6}, {3, 4, 6}, {3, 6, 8}, {2, 4, 8}, {1, 3, 8}, {0, 2, 8}, {3, 7, 9},
- {2, 5, 9}, {1, 4, 9}, {0, 3, 9}, {3, 8, 10}, {2, 6, 10}, {1, 5, 10}, {0, 4, 10}, {3, 9, 11},
- {2, 7, 11}, {1, 6, 11}, {0, 5, 11}, {0, 8, 13}, {2, 9, 13}, {1, 8, 13}, {0, 7, 13}, {3, 10, 13},
- {2, 8, 13}, {1, 7, 13}, {0, 6, 13}, {3, 12, 14}, {2, 11, 14}, {1, 10, 14}, {0, 10, 14}, {3, 11, 14},
- {2, 10, 14}, {1, 9, 14}, {0, 9, 14}, {3, 14, 15}, {2, 13, 15}, {1, 12, 15}, {0, 12, 15}, {3, 13, 15},
- {2, 12, 15}, {1, 11, 15}, {0, 11, 15}, {3, 16, 16}, {2, 15, 16}, {1, 15, 16}, {0, 14, 16}, {3, 15, 16},
- {2, 14, 16}, {1, 14, 16}, {0, 13, 16}, {0, 16, 16}, {2, 16, 16}, {1, 16, 16}, {0, 15, 16}, {1, 13, 15},
- { -1, -1, -1}, { -1, -1, -1}, { -1, -1, -1}},
-
- {{1, 1, 2}, {0, 0, 2}, {3, 4, 4}, {3, 3, 4}, {2, 2, 3}, {2, 2, 3}, {3, 6, 6}, {2, 3, 6},
- {1, 3, 6}, {0, 1, 6}, {3, 5, 5}, {3, 5, 5}, {1, 2, 5}, {1, 2, 5}, {3, 7, 6}, {2, 4, 6},
- {1, 4, 6}, {0, 2, 6}, {3, 8, 7}, {2, 5, 7}, {1, 5, 7}, {0, 3, 7}, {0, 5, 8}, {2, 6, 8},
- {1, 6, 8}, {0, 4, 8}, {3, 9, 9}, {2, 7, 9}, {1, 7, 9}, {0, 6, 9}, {3, 11, 11}, {2, 9, 11},
- {1, 9, 11}, {0, 8, 11}, {3, 10, 11}, {2, 8, 11}, {1, 8, 11}, {0, 7, 11}, {0, 11, 12}, {2, 11, 12},
- {1, 11, 12}, {0, 10, 12}, {3, 12, 12}, {2, 10, 12}, {1, 10, 12}, {0, 9, 12}, {3, 14, 13}, {2, 13, 13},
- {1, 13, 13}, {0, 13, 13}, {3, 13, 13}, {2, 12, 13}, {1, 12, 13}, {0, 12, 13}, {1, 15, 14}, {0, 15, 14},
- {2, 15, 14}, {1, 14, 14}, {2, 14, 13}, {2, 14, 13}, {0, 14, 13}, {0, 14, 13}, {3, 16, 14}, {2, 16, 14},
- {1, 16, 14}, {0, 16, 14}, {3, 15, 13}},
-
- {{3, 7, 4}, {3, 6, 4}, {3, 5, 4}, {3, 4, 4}, {3, 3, 4}, {2, 2, 4}, {1, 1, 4}, {0, 0, 4},
- {1, 5, 5}, {2, 5, 5}, {1, 4, 5}, {2, 4, 5}, {1, 3, 5}, {3, 8, 5}, {2, 3, 5}, {1, 2, 5},
- {0, 3, 6}, {2, 7, 6}, {1, 7, 6}, {0, 2, 6}, {3, 9, 6}, {2, 6, 6}, {1, 6, 6}, {0, 1, 6},
- {0, 7, 7}, {0, 6, 7}, {2, 9, 7}, {0, 5, 7}, {3, 10, 7}, {2, 8, 7}, {1, 8, 7}, {0, 4, 7},
- {3, 12, 8}, {2, 11, 8}, {1, 10, 8}, {0, 9, 8}, {3, 11, 8}, {2, 10, 8}, {1, 9, 8}, {0, 8, 8},
- {0, 12, 9}, {2, 13, 9}, {1, 12, 9}, {0, 11, 9}, {3, 13, 9}, {2, 12, 9}, {1, 11, 9}, {0, 10, 9},
- {1, 15, 10}, {0, 14, 10}, {3, 14, 10}, {2, 14, 10}, {1, 14, 10}, {0, 13, 10}, {1, 13, 9}, {1, 13, 9},
- {1, 16, 10}, {0, 15, 10}, {3, 15, 10}, {2, 15, 10}, {3, 16, 10}, {2, 16, 10}, {0, 16, 10}, { -1, -1, -1},
- { -1, -1, -1}, { -1, -1, -1}, { -1, -1, -1}}
-};
-
-
-const ShiftOffset NumCoeffTrailOnes_indx[3][15] =
-{
- {{15, -1}, {14, 0}, {13, 1}, {10, -1}, {9, 3}, {8, 7}, {7, 11}, {6, 15},
- {5, 19}, {3, 19}, {2, 27}, {1, 35}, {0, 43}, {0, 55}, {1, 62}},
-
- {{14, -2}, {12, -2}, {10, -2}, {10, 10}, {9, 14}, {8, 18}, {7, 22}, {5, 22},
- {4, 30}, {3, 38}, {2, 46}, {2, 58}, {3, 65}, {16, 0}, {16, 0}},
-
- {{12, -8}, {11, 0}, {10, 8}, {9, 16}, {8, 24}, {7, 32}, {6, 40}, {6, 52},
- {6, 58}, {6, 61}, {16, 0}, {16, 0}, {16, 0}, {16, 0}, {16, 0}}
-};
-
-const static int nC_table[8] = {0, 0, 1, 1, 2, 2, 2, 2};
-
-#endif
-/**
-See algorithm in subclause 9.1, Table 9-1, Table 9-2. */
-AVCDec_Status ue_v(AVCDecBitstream *bitstream, uint *codeNum)
-{
- uint temp, tmp_cnt;
- int leading_zeros = 0;
- BitstreamShowBits(bitstream, 16, &temp);
- tmp_cnt = temp | 0x1;
-
- PV_CLZ(leading_zeros, tmp_cnt)
-
- if (leading_zeros < 8)
- {
- *codeNum = (temp >> (15 - (leading_zeros << 1))) - 1;
- BitstreamFlushBits(bitstream, (leading_zeros << 1) + 1);
- }
- else
- {
- BitstreamReadBits(bitstream, (leading_zeros << 1) + 1, &temp);
- *codeNum = temp - 1;
- }
-
- return AVCDEC_SUCCESS;
-}
-
-/**
-See subclause 9.1.1, Table 9-3 */
-AVCDec_Status se_v(AVCDecBitstream *bitstream, int *value)
-{
- uint temp, tmp_cnt;
- int leading_zeros = 0;
- BitstreamShowBits(bitstream, 16, &temp);
- tmp_cnt = temp | 0x1;
-
- PV_CLZ(leading_zeros, tmp_cnt)
-
- if (leading_zeros < 8)
- {
- temp >>= (15 - (leading_zeros << 1));
- BitstreamFlushBits(bitstream, (leading_zeros << 1) + 1);
- }
- else
- {
- BitstreamReadBits(bitstream, (leading_zeros << 1) + 1, &temp);
- }
-
- *value = temp >> 1;
-
- if (temp & 0x01) // lsb is signed bit
- *value = -(*value);
-
-// leading_zeros = temp >> 1;
-// *value = leading_zeros - (leading_zeros*2*(temp&1));
-
- return AVCDEC_SUCCESS;
-}
-
-AVCDec_Status se_v32bit(AVCDecBitstream *bitstream, int32 *value)
-{
- int leadingZeros;
- uint32 infobits;
- uint32 codeNum;
-
- if (AVCDEC_SUCCESS != GetEGBitstring32bit(bitstream, &leadingZeros, &infobits))
- return AVCDEC_FAIL;
-
- codeNum = (1 << leadingZeros) - 1 + infobits;
-
- *value = (codeNum + 1) / 2;
-
- if ((codeNum & 0x01) == 0) // lsb is signed bit
- *value = -(*value);
-
- return AVCDEC_SUCCESS;
-}
-
-
-AVCDec_Status te_v(AVCDecBitstream *bitstream, uint *value, uint range)
-{
- if (range > 1)
- {
- ue_v(bitstream, value);
- }
- else
- {
- BitstreamRead1Bit(bitstream, value);
- *value = 1 - (*value);
- }
- return AVCDEC_SUCCESS;
-}
-
-
-
-/* This function is only used for syntax with range from -2^31 to 2^31-1 */
-/* only a few of them in the SPS and PPS */
-AVCDec_Status GetEGBitstring32bit(AVCDecBitstream *bitstream, int *leadingZeros, uint32 *infobits)
-{
- int bit_value;
- uint info_temp;
-
- *leadingZeros = 0;
-
- BitstreamRead1Bit(bitstream, (uint*)&bit_value);
-
- while (!bit_value)
- {
- (*leadingZeros)++;
- BitstreamRead1Bit(bitstream, (uint*)&bit_value);
- }
-
- if (*leadingZeros > 0)
- {
- if (sizeof(uint) == 4) /* 32 bit machine */
- {
- BitstreamReadBits(bitstream, *leadingZeros, (uint*)&info_temp);
- *infobits = (uint32)info_temp;
- }
- else if (sizeof(uint) == 2) /* 16 bit machine */
- {
- *infobits = 0;
- if (*leadingZeros > 16)
- {
- BitstreamReadBits(bitstream, 16, (uint*)&info_temp);
- (*leadingZeros) -= 16;
- *infobits = ((uint32)info_temp) << (*leadingZeros);
- }
-
- BitstreamReadBits(bitstream, *leadingZeros, (uint*)&info_temp);
- *infobits |= (uint32)info_temp ;
- }
- }
- else
- *infobits = 0;
-
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-4 assignment of codeNum to values of coded_block_pattern. */
-const static uint8 MapCBP[48][2] =
-{
- {47, 0}, {31, 16}, {15, 1}, { 0, 2}, {23, 4}, {27, 8}, {29, 32}, {30, 3}, { 7, 5}, {11, 10}, {13, 12}, {14, 15},
- {39, 47}, {43, 7}, {45, 11}, {46, 13}, {16, 14}, { 3, 6}, { 5, 9}, {10, 31}, {12, 35}, {19, 37}, {21, 42}, {26, 44},
- {28, 33}, {35, 34}, {37, 36}, {42, 40}, {44, 39}, { 1, 43}, { 2, 45}, { 4, 46}, { 8, 17}, {17, 18}, {18, 20}, {20, 24},
- {24, 19}, { 6, 21}, { 9, 26}, {22, 28}, {25, 23}, {32, 27}, {33, 29}, {34, 30}, {36, 22}, {40, 25}, {38, 38}, {41, 41},
-};
-
-AVCDec_Status DecodeCBP(AVCMacroblock *currMB, AVCDecBitstream *stream)
-{
- uint codeNum;
- uint coded_block_pattern;
-
- ue_v(stream, &codeNum);
-
- if (codeNum > 47)
- {
- return AVCDEC_FAIL;
- }
-
- /* can get rid of the if _OPTIMIZE */
- if (currMB->mbMode == AVC_I4)
- {
- coded_block_pattern = MapCBP[codeNum][0];
- }
- else
- {
- coded_block_pattern = MapCBP[codeNum][1];
- }
-
-// currMB->cbpL = coded_block_pattern&0xF; /* modulo 16 */
-// currMB->cbpC = coded_block_pattern>>4; /* divide 16 */
- currMB->CBP = coded_block_pattern;
-
- return AVCDEC_SUCCESS;
-}
-
-
-/* TO BE OPTIMIZED !!!!! */
-AVCDec_Status ce_TotalCoeffTrailingOnes(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff, int nC)
-{
-#ifdef PV_NO_CLZ
- const static uint8 TotCofNTrail1[75][3] = {{0, 0, 16}/*error */, {0, 0, 16}/*error */, {1, 13, 15}, {1, 13, 15}, {0, 16, 16}, {2, 16, 16}, {1, 16, 16}, {0, 15, 16},
- {3, 16, 16}, {2, 15, 16}, {1, 15, 16}, {0, 14, 16}, {3, 15, 16}, {2, 14, 16}, {1, 14, 16}, {0, 13, 16},
- {3, 14, 15}, {2, 13, 15}, {1, 12, 15}, {0, 12, 15}, {3, 13, 15}, {2, 12, 15}, {1, 11, 15}, {0, 11, 15},
- {3, 12, 14}, {2, 11, 14}, {1, 10, 14}, {0, 10, 14}, {3, 11, 14}, {2, 10, 14}, {1, 9, 14}, {0, 9, 14},
- {0, 8, 13}, {2, 9, 13}, {1, 8, 13}, {0, 7, 13}, {3, 10, 13}, {2, 8, 13}, {1, 7, 13}, {0, 6, 13},
- {3, 9, 11}, {2, 7, 11}, {1, 6, 11}, {0, 5, 11}, {3, 8, 10},
- {2, 6, 10}, {1, 5, 10}, {0, 4, 10}, {3, 7, 9}, {2, 5, 9}, {1, 4, 9}, {0, 3, 9}, {3, 6, 8},
- {2, 4, 8}, {1, 3, 8}, {0, 2, 8}, {3, 5, 7}, {2, 3, 7}, {3, 4, 6}, {3, 4, 6}, {1, 2, 6},
- {1, 2, 6}, {0, 1, 6}, {0, 1, 6}, {3, 3, 5}, {3, 3, 5}, {3, 3, 5}, {3, 3, 5}, {2, 2, 3},
- {1, 1, 2}, {1, 1, 2}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}, {0, 0, 1}
- };
-
- const static uint8 TotCofNTrail2[84][3] = {{0, 0, 14 /* error */}, {0, 0, 14/*error */}, {3, 15, 13}, {3, 15, 13}, {3, 16, 14}, {2, 16, 14}, {1, 16, 14}, {0, 16, 14},
- {1, 15, 14}, {0, 15, 14}, {2, 15, 14}, {1, 14, 14}, {2, 14, 13}, {2, 14, 13}, {0, 14, 13}, {0, 14, 13},
- {3, 14, 13}, {2, 13, 13}, {1, 13, 13}, {0, 13, 13}, {3, 13, 13}, {2, 12, 13}, {1, 12, 13}, {0, 12, 13},
- {0, 11, 12}, {2, 11, 12}, {1, 11, 12}, {0, 10, 12}, {3, 12, 12}, {2, 10, 12}, {1, 10, 12}, {0, 9, 12},
- {3, 11, 11}, {2, 9, 11}, {1, 9, 11}, {0, 8, 11}, {3, 10, 11}, {2, 8, 11}, {1, 8, 11}, {0, 7, 11},
- {3, 9, 9}, {2, 7, 9}, {1, 7, 9}, {0, 6, 9}, {0, 5, 8}, {0, 5, 8}, {2, 6, 8}, {2, 6, 8},
- {1, 6, 8}, {1, 6, 8}, {0, 4, 8}, {0, 4, 8}, {3, 8, 7}, {2, 5, 7}, {1, 5, 7}, {0, 3, 7},
- {3, 7, 6}, {3, 7, 6}, {2, 4, 6}, {2, 4, 6}, {1, 4, 6}, {1, 4, 6}, {0, 2, 6}, {0, 2, 6},
- {3, 6, 6}, {2, 3, 6}, {1, 3, 6}, {0, 1, 6}, {3, 5, 5}, {3, 5, 5}, {1, 2, 5}, {1, 2, 5},
- {3, 4, 4}, {3, 3, 4}, {2, 2, 3}, {2, 2, 3}, {1, 1, 2}, {1, 1, 2}, {1, 1, 2}, {1, 1, 2},
- {0, 0, 2}, {0, 0, 2}, {0, 0, 2}, {0, 0, 2}
- };
-
- const static uint8 TotCofNTrail3[64][3] = {{0, 0, 10/*error*/}, {0, 16, 10}, {3, 16, 10}, {2, 16, 10}, {1, 16, 10}, {0, 15, 10}, {3, 15, 10},
- {2, 15, 10}, {1, 15, 10}, {0, 14, 10}, {3, 14, 10}, {2, 14, 10}, {1, 14, 10}, {0, 13, 10}, {1, 13, 9},
- {1, 13, 9}, {0, 12, 9}, {2, 13, 9}, {1, 12, 9}, {0, 11, 9}, {3, 13, 9}, {2, 12, 9}, {1, 11, 9},
- {0, 10, 9}, {3, 12, 8}, {2, 11, 8}, {1, 10, 8}, {0, 9, 8}, {3, 11, 8}, {2, 10, 8}, {1, 9, 8},
- {0, 8, 8}, {0, 7, 7}, {0, 6, 7}, {2, 9, 7}, {0, 5, 7}, {3, 10, 7}, {2, 8, 7}, {1, 8, 7},
- {0, 4, 7}, {0, 3, 6}, {2, 7, 6}, {1, 7, 6}, {0, 2, 6}, {3, 9, 6}, {2, 6, 6}, {1, 6, 6},
- {0, 1, 6}, {1, 5, 5}, {2, 5, 5}, {1, 4, 5}, {2, 4, 5}, {1, 3, 5}, {3, 8, 5}, {2, 3, 5},
- {1, 2, 5}, {3, 7, 4}, {3, 6, 4}, {3, 5, 4}, {3, 4, 4}, {3, 3, 4}, {2, 2, 4}, {1, 1, 4},
- {0, 0, 4}
- };
-#endif
- uint code;
-
-#ifdef PV_NO_CLZ
- uint8 *pcode;
- if (nC < 2)
- {
- BitstreamShowBits(stream, 16, &code);
-
- if (code >= 8192)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>13)+65+2][0]);
- }
- else if (code >= 2048)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>9)+50+2][0]);
- }
- else if (code >= 1024)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>8)+46+2][0]);
- }
- else if (code >= 512)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>7)+42+2][0]);
- }
- else if (code >= 256)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>6)+38+2][0]);
- }
- else if (code >= 128)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>5)+34+2][0]);
- }
- else if (code >= 64)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>3)+22+2][0]);
- }
- else if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>2)+14+2][0]);
- }
- else if (code >= 16)
- {
- pcode = (uint8*) & (TotCofNTrail1[(code>>1)+6+2][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail1[(code-2)+2][0]);
- }
-
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
- }
- else if (nC < 4)
- {
- BitstreamShowBits(stream, 14, &code);
-
- if (code >= 4096)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>10)+66+2][0]);
- }
- else if (code >= 2048)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>8)+54+2][0]);
- }
- else if (code >= 512)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>7)+46+2][0]);
- }
- else if (code >= 128)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>5)+34+2][0]);
- }
- else if (code >= 64)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>3)+22+2][0]);
- }
- else if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>2)+14+2][0]);
- }
- else if (code >= 16)
- {
- pcode = (uint8*) & (TotCofNTrail2[(code>>1)+6+2][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail2[code-2+2][0]);
- }
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
- }
- else if (nC < 8)
- {
- BitstreamShowBits(stream, 10, &code);
-
- if (code >= 512)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>6)+47+1][0]);
- }
- else if (code >= 256)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>5)+39+1][0]);
- }
- else if (code >= 128)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>4)+31+1][0]);
- }
- else if (code >= 64)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>3)+23+1][0]);
- }
- else if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>2)+15+1][0]);
- }
- else if (code >= 16)
- {
- pcode = (uint8*) & (TotCofNTrail3[(code>>1)+7+1][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail3[code-1+1][0]);
- }
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
- }
- else
- {
- /* read 6 bit FLC */
- BitstreamReadBits(stream, 6, &code);
-
-
- *TrailingOnes = code & 3;
- *TotalCoeff = (code >> 2) + 1;
-
- if (*TotalCoeff > 16)
- {
- *TotalCoeff = 16; // _ERROR
- }
-
- if (code == 3)
- {
- *TrailingOnes = 0;
- (*TotalCoeff)--;
- }
- }
-#else
- const VLCNumCoeffTrail *ptr;
- const ShiftOffset *ptr_indx;
- uint temp, leading_zeros = 0;
-
- if (nC < 8)
- {
-
- BitstreamShowBits(stream, 16, &code);
- temp = code | 1;
-
- PV_CLZ(leading_zeros, temp)
-
- temp = nC_table[nC];
- ptr_indx = &NumCoeffTrailOnes_indx[temp][leading_zeros];
- ptr = &NumCoeffTrailOnes[temp][(code >> ptr_indx->shift) + ptr_indx->offset];
- *TrailingOnes = ptr->trailing;
- *TotalCoeff = ptr->total_coeff;
- BitstreamFlushBits(stream, ptr->length);
- }
- else
- {
- /* read 6 bit FLC */
- BitstreamReadBits(stream, 6, &code);
-
-
- *TrailingOnes = code & 3;
- *TotalCoeff = (code >> 2) + 1;
-
- if (*TotalCoeff > 16)
- {
- *TotalCoeff = 16; // _ERROR
- }
-
- if (code == 3)
- {
- *TrailingOnes = 0;
- (*TotalCoeff)--;
- }
- }
-#endif
- return AVCDEC_SUCCESS;
-}
-
-/* TO BE OPTIMIZED !!!!! */
-AVCDec_Status ce_TotalCoeffTrailingOnesChromaDC(AVCDecBitstream *stream, int *TrailingOnes, int *TotalCoeff)
-{
- AVCDec_Status status;
-
- const static uint8 TotCofNTrail5[21][3] =
- {
- {3, 4, 7}, {3, 4, 7}, {2, 4, 8}, {1, 4, 8}, {2, 3, 7}, {2, 3, 7}, {1, 3, 7},
- {1, 3, 7}, {0, 4, 6}, {0, 3, 6}, {0, 2, 6}, {3, 3, 6}, {1, 2, 6}, {0, 1, 6},
- {2, 2, 3}, {0, 0, 2}, {0, 0, 2}, {1, 1, 1}, {1, 1, 1}, {1, 1, 1}, {1, 1, 1}
- };
-
- uint code;
- uint8 *pcode;
-
- status = BitstreamShowBits(stream, 8, &code);
-
- if (code >= 32)
- {
- pcode = (uint8*) & (TotCofNTrail5[(code>>5)+13][0]);
- }
- else if (code >= 8)
- {
- pcode = (uint8*) & (TotCofNTrail5[(code>>2)+6][0]);
- }
- else
- {
- pcode = (uint8*) & (TotCofNTrail5[code][0]);
- }
-
- *TrailingOnes = pcode[0];
- *TotalCoeff = pcode[1];
-
- BitstreamFlushBits(stream, pcode[2]);
-
- return status;
-}
-
-/* see Table 9-6 */
-AVCDec_Status ce_LevelPrefix(AVCDecBitstream *stream, uint *code)
-{
- uint temp;
- uint leading_zeros = 0;
- BitstreamShowBits(stream, 16, &temp);
- temp |= 1 ;
-
- PV_CLZ(leading_zeros, temp)
-
- BitstreamFlushBits(stream, leading_zeros + 1);
- *code = leading_zeros;
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-7 and 9-8 */
-AVCDec_Status ce_TotalZeros(AVCDecBitstream *stream, int *code, int TotalCoeff)
-{
- const static uint8 TotZero1[28][2] = {{15, 9}, {14, 9}, {13, 9}, {12, 8},
- {12, 8}, {11, 8}, {11, 8}, {10, 7}, {9, 7}, {8, 6}, {8, 6}, {7, 6}, {7, 6}, {6, 5}, {6, 5},
- {6, 5}, {6, 5}, {5, 5}, {5, 5}, {5, 5}, {5, 5}, {4, 4}, {3, 4},
- {2, 3}, {2, 3}, {1, 3}, {1, 3}, {0, 1}
- };
-
- const static uint8 TotZero2n3[2][18][2] = {{{14, 6}, {13, 6}, {12, 6}, {11, 6},
- {10, 5}, {10, 5}, {9, 5}, {9, 5}, {8, 4}, {7, 4}, {6, 4}, {5, 4}, {4, 3}, {4, 3},
- {3, 3}, {2, 3}, {1, 3}, {0, 3}},
-
- /*const static uint8 TotZero3[18][2]=*/{{13, 6}, {11, 6}, {12, 5}, {12, 5}, {10, 5},
- {10, 5}, {9, 5}, {9, 5}, {8, 4}, {5, 4}, {4, 4}, {0, 4}, {7, 3}, {7, 3}, {6, 3}, {3, 3},
- {2, 3}, {1, 3}}
- };
-
- const static uint8 TotZero4[17][2] = {{12, 5}, {11, 5}, {10, 5}, {0, 5}, {9, 4},
- {9, 4}, {7, 4}, {7, 4}, {3, 4}, {3, 4}, {2, 4}, {2, 4}, {8, 3}, {6, 3}, {5, 3}, {4, 3}, {1, 3}
- };
-
- const static uint8 TotZero5[13][2] = {{11, 5}, {9, 5}, {10, 4}, {8, 4}, {2, 4},
- {1, 4}, {0, 4}, {7, 3}, {7, 3}, {6, 3}, {5, 3}, {4, 3}, {3, 3}
- };
-
- const static uint8 TotZero6to10[5][15][2] = {{{10, 6}, {0, 6}, {1, 5}, {1, 5}, {8, 4},
- {8, 4}, {8, 4}, {8, 4}, {9, 3}, {7, 3}, {6, 3}, {5, 3}, {4, 3}, {3, 3}, {2, 3}},
-
- /*const static uint8 TotZero7[15][2]=*/{{9, 6}, {0, 6}, {1, 5}, {1, 5}, {7, 4},
- {7, 4}, {7, 4}, {7, 4}, {8, 3}, {6, 3}, {4, 3}, {3, 3}, {2, 3}, {5, 2}, {5, 2}},
-
- /*const static uint8 TotZero8[15][2]=*/{{8, 6}, {0, 6}, {2, 5}, {2, 5}, {1, 4},
- {1, 4}, {1, 4}, {1, 4}, {7, 3}, {6, 3}, {3, 3}, {5, 2}, {5, 2}, {4, 2}, {4, 2}},
-
- /*const static uint8 TotZero9[15][2]=*/{{1, 6}, {0, 6}, {7, 5}, {7, 5}, {2, 4},
- {2, 4}, {2, 4}, {2, 4}, {5, 3}, {6, 2}, {6, 2}, {4, 2}, {4, 2}, {3, 2}, {3, 2}},
-
- /*const static uint8 TotZero10[11][2]=*/{{1, 5}, {0, 5}, {6, 4}, {6, 4}, {2, 3},
- {2, 3}, {2, 3}, {2, 3}, {5, 2}, {4, 2}, {3, 2}, {0, 0}, {0, 0}, {0, 0}, {0, 0}}
- };
-
- const static uint8 TotZero11[7][2] = {{0, 4}, {1, 4}, {2, 3}, {2, 3}, {3, 3}, {5, 3}, {4, 1}};
-
- const static uint8 TotZero12to15[4][5][2] =
- {
- {{3, 1}, {2, 2}, {4, 3}, {1, 4}, {0, 4}},
- {{2, 1}, {3, 2}, {1, 3}, {0, 3}, {0, 0}},
- {{2, 1}, {1, 2}, {0, 2}, {0, 0}, {0, 0}},
- {{1, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}}
- };
-
- uint temp, mask;
- int indx;
- uint8 *pcode;
-
- if (TotalCoeff == 1)
- {
- BitstreamShowBits(stream, 9, &temp);
-
- if (temp >= 256)
- {
- pcode = (uint8*) & (TotZero1[27][0]);
- }
- else if (temp >= 64)
- {
- pcode = (uint8*) & (TotZero1[(temp>>5)+19][0]);
- }
- else if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero1[(temp>>2)+5][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero1[temp-1][0]);
- }
-
- }
- else if (TotalCoeff == 2 || TotalCoeff == 3)
- {
- BitstreamShowBits(stream, 6, &temp);
-
- if (temp >= 32)
- {
- pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][(temp>>3)+10][0]);
- }
- else if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][(temp>>2)+6][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero2n3[TotalCoeff-2][temp][0]);
- }
- }
- else if (TotalCoeff == 4)
- {
- BitstreamShowBits(stream, 5, &temp);
-
- if (temp >= 12)
- {
- pcode = (uint8*) & (TotZero4[(temp>>2)+9][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero4[temp][0]);
- }
- }
- else if (TotalCoeff == 5)
- {
- BitstreamShowBits(stream, 5, &temp);
-
- if (temp >= 16)
- {
- pcode = (uint8*) & (TotZero5[(temp>>2)+5][0]);
- }
- else if (temp >= 2)
- {
- pcode = (uint8*) & (TotZero5[(temp>>1)+1][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero5[temp][0]);
- }
- }
- else if (TotalCoeff >= 6 && TotalCoeff <= 10)
- {
- if (TotalCoeff == 10)
- {
- BitstreamShowBits(stream, 5, &temp);
- }
- else
- {
- BitstreamShowBits(stream, 6, &temp);
- }
-
-
- if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero6to10[TotalCoeff-6][(temp>>3)+7][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero6to10[TotalCoeff-6][temp][0]);
- }
- }
- else if (TotalCoeff == 11)
- {
- BitstreamShowBits(stream, 4, &temp);
-
-
- if (temp >= 8)
- {
- pcode = (uint8*) & (TotZero11[6][0]);
- }
- else if (temp >= 4)
- {
- pcode = (uint8*) & (TotZero11[(temp>>1)+2][0]);
- }
- else
- {
- pcode = (uint8*) & (TotZero11[temp][0]);
- }
- }
- else
- {
- BitstreamShowBits(stream, (16 - TotalCoeff), &temp);
- mask = 1 << (15 - TotalCoeff);
- indx = 0;
- while ((temp&mask) == 0 && indx < (16 - TotalCoeff)) /* search location of 1 bit */
- {
- mask >>= 1;
- indx++;
- }
-
- pcode = (uint8*) & (TotZero12to15[TotalCoeff-12][indx]);
- }
-
- *code = pcode[0];
- BitstreamFlushBits(stream, pcode[1]);
-
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-9 */
-AVCDec_Status ce_TotalZerosChromaDC(AVCDecBitstream *stream, int *code, int TotalCoeff)
-{
- const static uint8 TotZeroChrom1to3[3][8][2] =
- {
- {{3, 3}, {2, 3}, {1, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
- {{2, 2}, {2, 2}, {1, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
- {{1, 1}, {1, 1}, {1, 1}, {1, 1}, {0, 1}, {0, 1}, {0, 1}, {0, 1}},
- };
-
-
- uint temp;
- uint8 *pcode;
-
- BitstreamShowBits(stream, 3, &temp);
- pcode = (uint8*) & (TotZeroChrom1to3[TotalCoeff-1][temp]);
-
- *code = pcode[0];
-
- BitstreamFlushBits(stream, pcode[1]);
-
- return AVCDEC_SUCCESS;
-}
-
-/* see Table 9-10 */
-AVCDec_Status ce_RunBefore(AVCDecBitstream *stream, int *code, int zerosLeft)
-{
- const static int codlen[6] = {1, 2, 2, 3, 3, 3}; /* num bits to read */
- const static uint8 RunBeforeTab[6][8][2] = {{{1, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
- /*const static int RunBefore2[4][2]=*/{{2, 2}, {1, 2}, {0, 1}, {0, 1}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
- /*const static int RunBefore3[4][2]=*/{{3, 2}, {2, 2}, {1, 2}, {0, 2}, {0, 0}, {0, 0}, {0, 0}, {0, 0}},
- /*const static int RunBefore4[7][2]=*/{{4, 3}, {3, 3}, {2, 2}, {2, 2}, {1, 2}, {1, 2}, {0, 2}, {0, 2}},
- /*const static int RunBefore5[7][2]=*/{{5, 3}, {4, 3}, {3, 3}, {2, 3}, {1, 2}, {1, 2}, {0, 2}, {0, 2}},
- /*const static int RunBefore6[7][2]=*/{{1, 3}, {2, 3}, {4, 3}, {3, 3}, {6, 3}, {5, 3}, {0, 2}, {0, 2}}
- };
-
- uint temp;
- uint8 *pcode;
- int indx;
-
- if (zerosLeft <= 6)
- {
- BitstreamShowBits(stream, codlen[zerosLeft-1], &temp);
-
- pcode = (uint8*) & (RunBeforeTab[zerosLeft-1][temp][0]);
-
- *code = pcode[0];
-
- BitstreamFlushBits(stream, pcode[1]);
- }
- else
- {
- BitstreamReadBits(stream, 3, &temp);
- if (temp)
- {
- *code = 7 - temp;
- }
- else
- {
- BitstreamShowBits(stream, 9, &temp);
- temp <<= 7;
- temp |= 1;
- indx = 0;
- PV_CLZ(indx, temp)
- *code = 7 + indx;
- BitstreamFlushBits(stream, indx + 1);
- }
- }
-
-
- return AVCDEC_SUCCESS;
-}
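The removed vlc.cpp paired the CAVLC lookup tables with the Exp-Golomb parsers ue_v(), se_v() and te_v() used by the macroblock layer above. The mapping itself is compact; the sketch below reproduces it over a simplified MSB-first bit reader instead of the AVCDecBitstream API (BitReader and its methods are illustrative stand-ins, not part of the removed sources):

    #include <cstdint>
    #include <cstdio>

    // Simplified MSB-first bit reader over a byte buffer (illustrative only).
    struct BitReader {
        const uint8_t* data;
        size_t pos;                           // current bit position
        uint32_t readBit() {
            uint32_t b = (data[pos >> 3] >> (7 - (pos & 7))) & 1;
            ++pos;
            return b;
        }
        uint32_t readBits(int n) {            // n <= 32
            uint32_t v = 0;
            while (n-- > 0) v = (v << 1) | readBit();
            return v;
        }
    };

    // ue(v): leadingZeros zero bits, a one bit, then leadingZeros suffix bits;
    // codeNum = 2^leadingZeros - 1 + suffix (subclause 9.1, Table 9-1).
    static uint32_t ue(BitReader& br) {
        int leadingZeros = 0;
        while (br.readBit() == 0) ++leadingZeros;
        uint32_t suffix = leadingZeros ? br.readBits(leadingZeros) : 0;
        return (1u << leadingZeros) - 1 + suffix;
    }

    // se(v): codeNum mapped to 0, 1, -1, 2, -2, ... (Table 9-3), the same
    // sign convention as the removed se_v()/se_v32bit().
    static int32_t se(BitReader& br) {
        uint32_t codeNum = ue(br);
        int32_t magnitude = (codeNum + 1) / 2;
        return (codeNum & 1) ? magnitude : -magnitude;
    }

    int main() {
        uint8_t buf[] = { 0x20 };             // bits 00100 000 -> codeNum 3
        BitReader br{buf, 0};
        printf("%u\n", ue(br));               // prints 3; se() would give +2
        return 0;
    }
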
diff --git a/media/libstagefright/colorconversion/SoftwareRenderer.cpp b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
index a4ca32d..3246021 100644
--- a/media/libstagefright/colorconversion/SoftwareRenderer.cpp
+++ b/media/libstagefright/colorconversion/SoftwareRenderer.cpp
@@ -93,6 +93,11 @@
GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
| GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
+ CHECK_EQ(0,
+ native_window_set_scaling_mode(
+ mNativeWindow.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW));
+
// Width must be multiple of 32???
CHECK_EQ(0, native_window_set_buffers_geometry(
mNativeWindow.get(),
diff --git a/media/libstagefright/omx/SoftOMXPlugin.cpp b/media/libstagefright/omx/SoftOMXPlugin.cpp
index 02b1c8e..1e33f05f 100644
--- a/media/libstagefright/omx/SoftOMXPlugin.cpp
+++ b/media/libstagefright/omx/SoftOMXPlugin.cpp
@@ -38,7 +38,6 @@
{ "OMX.google.amrnb.decoder", "amrdec", "audio_decoder.amrnb" },
{ "OMX.google.amrwb.decoder", "amrdec", "audio_decoder.amrwb" },
{ "OMX.google.h264.decoder", "h264dec", "video_decoder.avc" },
- { "OMX.google.avc.decoder", "avcdec", "video_decoder.avc" },
{ "OMX.google.g711.alaw.decoder", "g711dec", "audio_decoder.g711alaw" },
{ "OMX.google.g711.mlaw.decoder", "g711dec", "audio_decoder.g711mlaw" },
{ "OMX.google.h263.decoder", "mpeg4dec", "video_decoder.h263" },
diff --git a/native/android/native_window.cpp b/native/android/native_window.cpp
index 5c016c4..36fc9bf 100644
--- a/native/android/native_window.cpp
+++ b/native/android/native_window.cpp
@@ -76,7 +76,15 @@
int32_t ANativeWindow_setBuffersGeometry(ANativeWindow* window, int32_t width,
int32_t height, int32_t format) {
- return native_window_set_buffers_geometry(window, width, height, format);
+ int32_t err = native_window_set_buffers_geometry(window, width, height, format);
+ if (!err) {
+ int mode = NATIVE_WINDOW_SCALING_MODE_FREEZE;
+ if (width && height) {
+ mode = NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW;
+ }
+ err = native_window_set_scaling_mode(window, mode);
+ }
+ return err;
}
int32_t ANativeWindow_lock(ANativeWindow* window, ANativeWindow_Buffer* outBuffer,
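After this change, ANativeWindow_setBuffersGeometry() both sizes the buffers and selects the scaling mode: non-zero dimensions put the window into scale-to-window mode, while a 0x0 request falls back to freeze. A minimal caller-side sketch, assuming a valid ANativeWindow* has already been obtained (for example via ANativeWindow_fromSurface) and omitting error handling; the 640x480 size is an arbitrary example:

    #include <android/native_window.h>

    // Ask for fixed 640x480 RGBA buffers; the patched implementation also puts
    // the window into scale-to-window mode, so the buffers are stretched to
    // whatever size the window actually has.
    static int32_t useFixedSizeBuffers(ANativeWindow* window) {
        return ANativeWindow_setBuffersGeometry(window, 640, 480,
                                                WINDOW_FORMAT_RGBA_8888);
    }

    // Passing 0x0 goes back to buffers that track the window's own size; the
    // implementation above then restores NATIVE_WINDOW_SCALING_MODE_FREEZE.
    static int32_t useWindowSizeBuffers(ANativeWindow* window) {
        return ANativeWindow_setBuffersGeometry(window, 0, 0, 0);
    }
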
diff --git a/services/camera/libcameraservice/CameraService.cpp b/services/camera/libcameraservice/CameraService.cpp
index 07002ac..96b26e7 100644
--- a/services/camera/libcameraservice/CameraService.cpp
+++ b/services/camera/libcameraservice/CameraService.cpp
@@ -537,6 +537,8 @@
// If preview has been already started, register preview buffers now.
if (mHardware->previewEnabled()) {
if (window != 0) {
+ native_window_set_scaling_mode(window.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
native_window_set_buffers_transform(window.get(), mOrientation);
result = mHardware->setPreviewWindow(window);
}
@@ -643,6 +645,8 @@
}
if (mPreviewWindow != 0) {
+ native_window_set_scaling_mode(mPreviewWindow.get(),
+ NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
native_window_set_buffers_transform(mPreviewWindow.get(),
mOrientation);
}
diff --git a/services/java/com/android/server/ConnectivityService.java b/services/java/com/android/server/ConnectivityService.java
index 55c92e8..c7903c0 100644
--- a/services/java/com/android/server/ConnectivityService.java
+++ b/services/java/com/android/server/ConnectivityService.java
@@ -248,7 +248,7 @@
// list of DeathRecipients used to make sure features are turned off when
// a process dies
- private List mFeatureUsers;
+ private List<FeatureUser> mFeatureUsers;
private boolean mSystemReady;
private Intent mInitialBroadcast;
@@ -436,7 +436,7 @@
mNetRequestersPids[i] = new ArrayList();
}
- mFeatureUsers = new ArrayList();
+ mFeatureUsers = new ArrayList<FeatureUser>();
mNumDnsEntries = 0;
@@ -794,6 +794,20 @@
stopUsingNetworkFeature(this, false);
}
+ public boolean isSameUser(FeatureUser u) {
+ if (u == null) return false;
+
+ return isSameUser(u.mPid, u.mUid, u.mNetworkType, u.mFeature);
+ }
+
+ public boolean isSameUser(int pid, int uid, int networkType, String feature) {
+ if ((mPid == pid) && (mUid == uid) && (mNetworkType == networkType) &&
+ TextUtils.equals(mFeature, feature)) {
+ return true;
+ }
+ return false;
+ }
+
public String toString() {
return "FeatureUser("+mNetworkType+","+mFeature+","+mPid+","+mUid+"), created " +
(System.currentTimeMillis() - mCreateTime) + " mSec ago";
@@ -844,16 +858,29 @@
}
}
+ int restoreTimer = getRestoreDefaultNetworkDelay(usedNetworkType);
+
synchronized(this) {
- mFeatureUsers.add(f);
+ boolean addToList = true;
+ if (restoreTimer < 0) {
+ // In case no timer is specified for the feature,
+ // make sure we don't add a duplicate entry for the same request.
+ for (FeatureUser u : mFeatureUsers) {
+ if (u.isSameUser(f)) {
+ // A duplicate user was found; do not add.
+ addToList = false;
+ break;
+ }
+ }
+ }
+
+ if (addToList) mFeatureUsers.add(f);
if (!mNetRequestersPids[usedNetworkType].contains(currentPid)) {
// this gets used for per-pid dns when connected
mNetRequestersPids[usedNetworkType].add(currentPid);
}
}
- int restoreTimer = getRestoreDefaultNetworkDelay(usedNetworkType);
-
if (restoreTimer >= 0) {
mHandler.sendMessageDelayed(
mHandler.obtainMessage(EVENT_RESTORE_DEFAULT_NETWORK, f), restoreTimer);
@@ -903,11 +930,9 @@
boolean found = false;
synchronized(this) {
- for (int i = 0; i < mFeatureUsers.size() ; i++) {
- u = (FeatureUser)mFeatureUsers.get(i);
- if (uid == u.mUid && pid == u.mPid &&
- networkType == u.mNetworkType &&
- TextUtils.equals(feature, u.mFeature)) {
+ for (FeatureUser x : mFeatureUsers) {
+ if (x.isSameUser(pid, uid, networkType, feature)) {
+ u = x;
found = true;
break;
}
@@ -959,11 +984,8 @@
// do not pay attention to duplicate requests - in effect the
// API does not refcount and a single stop will counter multiple starts.
if (ignoreDups == false) {
- for (int i = 0; i < mFeatureUsers.size() ; i++) {
- FeatureUser x = (FeatureUser)mFeatureUsers.get(i);
- if (x.mUid == u.mUid && x.mPid == u.mPid &&
- x.mNetworkType == u.mNetworkType &&
- TextUtils.equals(x.mFeature, u.mFeature)) {
+ for (FeatureUser x : mFeatureUsers) {
+ if (x.isSameUser(u)) {
if (DBG) log("ignoring stopUsingNetworkFeature as dup is found");
return 1;
}
diff --git a/services/java/com/android/server/MasterClearReceiver.java b/services/java/com/android/server/MasterClearReceiver.java
index bdb5a24..86f57d1 100644
--- a/services/java/com/android/server/MasterClearReceiver.java
+++ b/services/java/com/android/server/MasterClearReceiver.java
@@ -43,11 +43,7 @@
@Override
public void run() {
try {
- if (intent.hasExtra("enableEFS")) {
- RecoverySystem.rebootToggleEFS(context, intent.getBooleanExtra("enableEFS", false));
- } else {
- RecoverySystem.rebootWipeUserData(context);
- }
+ RecoverySystem.rebootWipeUserData(context);
Log.wtf(TAG, "Still running after master clear?!");
} catch (IOException e) {
Slog.e(TAG, "Can't perform master clear/factory reset", e);
diff --git a/services/surfaceflinger/Layer.cpp b/services/surfaceflinger/Layer.cpp
index f3b6c4d..c29aeca 100644
--- a/services/surfaceflinger/Layer.cpp
+++ b/services/surfaceflinger/Layer.cpp
@@ -56,14 +56,14 @@
mTextureName(-1U),
mQueuedFrames(0),
mCurrentTransform(0),
+ mCurrentScalingMode(NATIVE_WINDOW_SCALING_MODE_FREEZE),
mCurrentOpacity(true),
mFormat(PIXEL_FORMAT_NONE),
mGLExtensions(GLExtensions::getInstance()),
mOpaqueLayer(true),
mNeedsDithering(false),
mSecure(false),
- mProtectedByApp(false),
- mFixedSize(false)
+ mProtectedByApp(false)
{
mCurrentCrop.makeInvalid();
glGenTextures(1, &mTextureName);
@@ -400,14 +400,7 @@
}
bool Layer::isFixedSize() const {
- Mutex::Autolock _l(mLock);
- return mFixedSize;
-}
-
-void Layer::setFixedSize(bool fixedSize)
-{
- Mutex::Autolock _l(mLock);
- mFixedSize = fixedSize;
+ return mCurrentScalingMode != NATIVE_WINDOW_SCALING_MODE_FREEZE;
}
bool Layer::isCropped() const {
@@ -437,9 +430,14 @@
const Rect crop(mSurfaceTexture->getCurrentCrop());
const uint32_t transform(mSurfaceTexture->getCurrentTransform());
- if ((crop != mCurrentCrop) || (transform != mCurrentTransform)) {
+ const uint32_t scalingMode(mSurfaceTexture->getCurrentScalingMode());
+ if ((crop != mCurrentCrop) ||
+ (transform != mCurrentTransform) ||
+ (scalingMode != mCurrentScalingMode))
+ {
mCurrentCrop = crop;
mCurrentTransform = transform;
+ mCurrentScalingMode = scalingMode;
mFlinger->invalidateHwcGeometry();
}
diff --git a/services/surfaceflinger/Layer.h b/services/surfaceflinger/Layer.h
index e3fc13d..ddfc666 100644
--- a/services/surfaceflinger/Layer.h
+++ b/services/surfaceflinger/Layer.h
@@ -59,7 +59,6 @@
status_t setBuffers(uint32_t w, uint32_t h,
PixelFormat format, uint32_t flags=0);
- // Set this Layer's buffers size
bool isFixedSize() const;
// LayerBase interface
@@ -88,7 +87,6 @@
void onFrameQueued();
virtual sp<ISurface> createSurface();
uint32_t getEffectiveUsage(uint32_t usage) const;
- void setFixedSize(bool fixedSize);
bool isCropped() const;
static bool getOpacityForFormat(uint32_t format);
@@ -106,6 +104,7 @@
GLfloat mTextureMatrix[16];
Rect mCurrentCrop;
uint32_t mCurrentTransform;
+ uint32_t mCurrentScalingMode;
bool mCurrentOpacity;
// constants
@@ -124,7 +123,6 @@
// binder thread, transaction thread
mutable Mutex mLock;
- bool mFixedSize;
};
// ---------------------------------------------------------------------------
diff --git a/services/surfaceflinger/SurfaceTextureLayer.cpp b/services/surfaceflinger/SurfaceTextureLayer.cpp
index 60fa965..a586d59 100644
--- a/services/surfaceflinger/SurfaceTextureLayer.cpp
+++ b/services/surfaceflinger/SurfaceTextureLayer.cpp
@@ -64,9 +64,6 @@
//LOGD("%s, w=%u, h=%u, format=%u, usage=%08x, effectiveUsage=%08x",
// __PRETTY_FUNCTION__, w, h, format, usage, effectiveUsage);
res = SurfaceTexture::dequeueBuffer(buf, w, h, format, effectiveUsage);
- if (res == NO_ERROR) {
- layer->setFixedSize(w && h);
- }
}
return res;
}
diff --git a/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java b/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java
index 0d9d27d..ac66b48 100644
--- a/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java
+++ b/telephony/java/com/android/internal/telephony/cdma/CDMALTEPhone.java
@@ -141,6 +141,11 @@
}
@Override
+ public String getDeviceSvn() {
+ return mImeiSv;
+ }
+
+ @Override
protected void log(String s) {
if (DBG)
Log.d(LOG_TAG, "[CDMALTEPhone] " + s);
diff --git a/tests/BiDiTests/res/drawable/alphabet_a.png b/tests/BiDiTests/res/drawable/alphabet_a.png
new file mode 100644
index 0000000..2a80ec1
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_a.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_b.png b/tests/BiDiTests/res/drawable/alphabet_b.png
new file mode 100644
index 0000000..ac887ad
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_b.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_c.png b/tests/BiDiTests/res/drawable/alphabet_c.png
new file mode 100644
index 0000000..f8cc5c6
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_c.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_d.png b/tests/BiDiTests/res/drawable/alphabet_d.png
new file mode 100644
index 0000000..764dfe5
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_d.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_e.png b/tests/BiDiTests/res/drawable/alphabet_e.png
new file mode 100644
index 0000000..dbd00e1
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_e.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_f.png b/tests/BiDiTests/res/drawable/alphabet_f.png
new file mode 100644
index 0000000..f6a1bbe
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_f.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_g.png b/tests/BiDiTests/res/drawable/alphabet_g.png
new file mode 100644
index 0000000..e9d360c
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_g.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_h.png b/tests/BiDiTests/res/drawable/alphabet_h.png
new file mode 100644
index 0000000..cbc4eb1
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_h.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_i.png b/tests/BiDiTests/res/drawable/alphabet_i.png
new file mode 100644
index 0000000..bae2103
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_i.png
Binary files differ
diff --git a/tests/BiDiTests/res/drawable/alphabet_j.png b/tests/BiDiTests/res/drawable/alphabet_j.png
new file mode 100644
index 0000000..264c6a7
--- /dev/null
+++ b/tests/BiDiTests/res/drawable/alphabet_j.png
Binary files differ
diff --git a/tests/BiDiTests/res/layout/gallery_ltr.xml b/tests/BiDiTests/res/layout/gallery_ltr.xml
new file mode 100644
index 0000000..d0e4168
--- /dev/null
+++ b/tests/BiDiTests/res/layout/gallery_ltr.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/gallery_ltr"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layoutDirection="ltr">
+
+ <Gallery
+ android:id="@+id/galleryview"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:spacing="10dip"
+ />
+
+</FrameLayout>
diff --git a/tests/BiDiTests/res/layout/gallery_rtl.xml b/tests/BiDiTests/res/layout/gallery_rtl.xml
new file mode 100644
index 0000000..c5c2f5c
--- /dev/null
+++ b/tests/BiDiTests/res/layout/gallery_rtl.xml
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:id="@+id/gallery_rtl"
+ android:layout_width="fill_parent"
+ android:layout_height="fill_parent"
+ android:layoutDirection="rtl">
+
+ <Gallery
+ android:id="@+id/galleryview"
+ android:layout_width="fill_parent"
+ android:layout_height="wrap_content"
+ android:spacing="10dip"
+ />
+
+</FrameLayout>
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java
index 6b38cc1..b45b98f 100644
--- a/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestActivity.java
@@ -137,6 +137,8 @@
addItem(result, "TextView Drawables LTR", BiDiTestTextViewDrawablesLtr.class, R.id.textview_drawables_ltr);
addItem(result, "TextView Drawables RTL", BiDiTestTextViewDrawablesRtl.class, R.id.textview_drawables_rtl);
+ addItem(result, "Gallery LTR", BiDiTestGalleryLtr.class, R.id.gallery_ltr);
+ addItem(result, "Gallery RTL", BiDiTestGalleryRtl.class, R.id.gallery_rtl);
return result;
}
@@ -147,4 +149,4 @@
inflater.inflate(R.menu.main_menu, menu);
return true;
}
-}
\ No newline at end of file
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryImages.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryImages.java
new file mode 100644
index 0000000..adc17e1
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryImages.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.bidi;
+
+import android.content.Context;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.BaseAdapter;
+import android.widget.ImageView;
+import android.widget.ImageView.ScaleType;
+
+public class BiDiTestGalleryImages extends BaseAdapter {
+ int mGalleryItemBackground;
+ private Context mContext;
+
+ private Integer[] mImageIds = {
+ R.drawable.alphabet_a,
+ R.drawable.alphabet_b,
+ R.drawable.alphabet_c,
+ R.drawable.alphabet_d,
+ R.drawable.alphabet_e,
+ R.drawable.alphabet_f,
+ R.drawable.alphabet_g,
+ R.drawable.alphabet_h,
+ R.drawable.alphabet_i,
+ R.drawable.alphabet_j,
+ };
+
+ public BiDiTestGalleryImages(Context c) {
+ mContext = c;
+ }
+
+ @Override
+ public int getCount() {
+ return mImageIds.length;
+ }
+
+ @Override
+ public Object getItem(int position) {
+ return position;
+ }
+
+ @Override
+ public long getItemId(int position) {
+ return position;
+ }
+
+ @Override
+ public View getView(int position, View convertView, ViewGroup parent) {
+ ImageView i = new ImageView(mContext);
+ i.setImageResource(mImageIds[position]);
+ i.setScaleType(ScaleType.CENTER_INSIDE);
+ return i;
+ }
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryLtr.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryLtr.java
new file mode 100644
index 0000000..fa86b1a
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryLtr.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.bidi;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Gallery;
+
+public class BiDiTestGalleryLtr extends Fragment {
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.gallery_ltr, container, false);
+ Gallery g = (Gallery) v.findViewById(R.id.galleryview);
+ g.setAdapter(new BiDiTestGalleryImages(this.getActivity().getBaseContext()));
+ return v;
+ }
+}
diff --git a/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryRtl.java b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryRtl.java
new file mode 100644
index 0000000..4cef658
--- /dev/null
+++ b/tests/BiDiTests/src/com/android/bidi/BiDiTestGalleryRtl.java
@@ -0,0 +1,35 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.android.bidi;
+
+import android.app.Fragment;
+import android.os.Bundle;
+import android.view.LayoutInflater;
+import android.view.View;
+import android.view.ViewGroup;
+import android.widget.Gallery;
+
+public class BiDiTestGalleryRtl extends Fragment {
+
+ @Override
+ public View onCreateView(LayoutInflater inflater, ViewGroup container,
+ Bundle savedInstanceState) {
+ View v = inflater.inflate(R.layout.gallery_rtl, container, false);
+ Gallery g = (Gallery) v.findViewById(R.id.galleryview);
+ g.setAdapter(new BiDiTestGalleryImages(this.getActivity().getBaseContext()));
+ return v;
+ }
+}
diff --git a/tests/TileBenchmark/Android.mk b/tests/TileBenchmark/Android.mk
new file mode 100644
index 0000000..430f0f1
--- /dev/null
+++ b/tests/TileBenchmark/Android.mk
@@ -0,0 +1,32 @@
+# Copyright (C) 2011 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_MODULE_TAGS := optional
+
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
+
+LOCAL_PACKAGE_NAME := TileBenchmark
+
+include $(BUILD_PACKAGE)
+
+##################################################
+include $(CLEAR_VARS)
+
+include $(BUILD_MULTI_PREBUILT)
+
+# Use the following include to make our test apk.
+include $(call all-makefiles-under,$(LOCAL_PATH))
diff --git a/tests/TileBenchmark/AndroidManifest.xml b/tests/TileBenchmark/AndroidManifest.xml
new file mode 100644
index 0000000..663cc0d
--- /dev/null
+++ b/tests/TileBenchmark/AndroidManifest.xml
@@ -0,0 +1,20 @@
+<?xml version="1.0" encoding="utf-8"?>
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+ android:versionCode="1"
+ android:versionName="1.0" package="com.test.tilebenchmark">
+ <uses-permission android:name="android.permission.INTERNET"/>
+ <application android:icon="@drawable/icon"
+ android:label="@string/app_name"
+ android:hardwareAccelerated="true">
+ <activity android:name=".ProfileActivity"
+ android:label="@string/profile_activity">
+ <intent-filter>
+ <action android:name="android.intent.action.MAIN" />
+ <category android:name="android.intent.category.LAUNCHER" />
+ </intent-filter>
+ </activity>
+ <activity android:name=".PlaybackActivity"
+ android:label="@string/playback_activity">
+ </activity>
+ </application>
+</manifest>
diff --git a/tests/TileBenchmark/res/drawable-hdpi/icon.png b/tests/TileBenchmark/res/drawable-hdpi/icon.png
new file mode 100644
index 0000000..8074c4c
--- /dev/null
+++ b/tests/TileBenchmark/res/drawable-hdpi/icon.png
Binary files differ
diff --git a/tests/TileBenchmark/res/drawable-ldpi/icon.png b/tests/TileBenchmark/res/drawable-ldpi/icon.png
new file mode 100644
index 0000000..1095584
--- /dev/null
+++ b/tests/TileBenchmark/res/drawable-ldpi/icon.png
Binary files differ
diff --git a/tests/TileBenchmark/res/drawable-mdpi/icon.png b/tests/TileBenchmark/res/drawable-mdpi/icon.png
new file mode 100644
index 0000000..a07c69f
--- /dev/null
+++ b/tests/TileBenchmark/res/drawable-mdpi/icon.png
Binary files differ
diff --git a/tests/TileBenchmark/res/layout/main.xml b/tests/TileBenchmark/res/layout/main.xml
new file mode 100644
index 0000000..4a81da6
--- /dev/null
+++ b/tests/TileBenchmark/res/layout/main.xml
@@ -0,0 +1,53 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ >
+ <LinearLayout
+ android:id="@+id/top"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ >
+ <Button
+ android:id="@+id/inspect"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/inspect_log"
+ />
+ <Spinner
+ android:id="@+id/velocity"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:gravity="center_horizontal"
+ android:prompt="@string/desired_scroll_velocity"
+ />
+ <EditText
+ android:id="@+id/url"
+ android:layout_width="0dip"
+ android:layout_height="wrap_content"
+ android:inputType="textUri"
+ android:imeOptions="actionGo"
+ android:layout_weight="1"
+ />
+ </LinearLayout>
+ <com.test.tilebenchmark.ProfiledWebView
+ android:id="@+id/web"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ />
+</LinearLayout>
diff --git a/tests/TileBenchmark/res/layout/playback.xml b/tests/TileBenchmark/res/layout/playback.xml
new file mode 100644
index 0000000..aa1c8a4
--- /dev/null
+++ b/tests/TileBenchmark/res/layout/playback.xml
@@ -0,0 +1,58 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
+ android:orientation="vertical"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ >
+ <LinearLayout
+ android:id="@+id/top"
+ android:layout_width="match_parent"
+ android:layout_height="wrap_content"
+ >
+ <Button
+ android:id="@+id/backward"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/backward"
+ />
+ <TextView
+ android:id="@+id/frame_display"
+ android:layout_width="0dip"
+ android:layout_height="wrap_content"
+ android:gravity="center_horizontal"
+ android:textAppearance="?android:attr/textAppearanceLarge"
+ android:layout_weight="1"
+ />
+ <Button
+ android:id="@+id/forward"
+ android:layout_width="wrap_content"
+ android:layout_height="wrap_content"
+ android:text="@string/forward"
+ />
+ <SeekBar
+ android:id="@+id/seek_bar"
+ android:layout_width="0dip"
+ android:layout_height="wrap_content"
+ android:layout_weight="10"
+ />
+ </LinearLayout>
+ <com.test.tilebenchmark.PlaybackView
+ android:id="@+id/playback"
+ android:layout_width="match_parent"
+ android:layout_height="match_parent"
+ />
+</LinearLayout>
diff --git a/tests/TileBenchmark/res/values/colors.xml b/tests/TileBenchmark/res/values/colors.xml
new file mode 100644
index 0000000..3958083
--- /dev/null
+++ b/tests/TileBenchmark/res/values/colors.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<resources>
+ <!-- The color of tiles with valid textures -->
+ <color name="ready_tile">#ff4ac230</color>
+ <!-- The color of tiles with stale / invalid textures -->
+ <color name="unready_tile">#ff744400</color>
+ <!-- Background color for logged URLs -->
+ <color name="finished_url">#ff004000</color>
+ <!-- Background color for URLs with logging in progress -->
+ <color name="unfinished_url">#ff400000</color>
+</resources>
diff --git a/tests/TileBenchmark/res/values/strings.xml b/tests/TileBenchmark/res/values/strings.xml
new file mode 100644
index 0000000..f70ee2c
--- /dev/null
+++ b/tests/TileBenchmark/res/values/strings.xml
@@ -0,0 +1,67 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2011 The Android Open Source Project
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<resources>
+ <!-- Button, steps back a single frame [CHAR LIMIT=15] -->
+ <string name="backward">Backward</string>
+ <!-- Button, steps forward a single frame [CHAR LIMIT=15] -->
+ <string name="forward">Forward</string>
+ <!-- The name of the application [CHAR LIMIT=20] -->
+ <string name="app_name">TileBenchmark</string>
+ <!-- name of the auto-scroller / tile logger activity [CHAR LIMIT=100] -->
+ <string name="profile_activity">Webview Profiler</string>
+ <!-- name of the tile log playback activity [CHAR LIMIT=100] -->
+ <string name="playback_activity">Webview Tile Playback</string>
+ <!-- Button, loads another tile log [CHAR LIMIT=30] -->
+ <string name="loadbutton">Load</string>
+ <!-- Button, opens the playback activity [CHAR LIMIT=20] -->
+ <string name="inspect_log">Inspect Log</string>
+ <!-- The speed of auto-scrolling [CHAR LIMIT=30] -->
+ <string name="desired_scroll_velocity">Choose Scroll Velocity</string>
+ <!-- Pixels moved per frame [CHAR LIMIT=10] -->
+ <string-array name="velocity_array">
+ <item>1</item>
+ <item>25</item>
+ <item>50</item>
+ <item>100</item>
+ <item>200</item>
+ <item>400</item>
+ </string-array>
+ <!-- 25th percentile - 25% of frames fall below this value [CHAR LIMIT=12]
+ -->
+ <string name="percentile_25">25%ile</string>
+ <!-- 50th percentile - 50% of frames fall below this value (aka median)
+ [CHAR LIMIT=12] -->
+ <string name="percentile_50">median</string>
+ <!-- 75th percentile - 75% of frames fall below this value [CHAR LIMIT=12]
+ -->
+ <string name="percentile_75">75%ile</string>
+ <!-- Frame rate [CHAR LIMIT=15] -->
+ <string name="frames_per_second">Frames/sec</string>
+ <!-- Portion of viewport covered by good tiles [CHAR LIMIT=15] -->
+ <string name="viewport_coverage">Coverage</string>
+ <!-- Format string for stat value overlay [CHAR LIMIT=15] -->
+ <string name="format_stat">%4.4f</string>
+ <!-- Format string for displaying aggregate stats+values (nr of valid tiles,
+ etc.) [CHAR LIMIT=20] -->
+ <string name="format_stat_name">%1$9s %2$3d</string>
+ <!-- Text hovering over canvas, number of tiles ready [CHAR LIMIT=15] -->
+ <string name="ready_tiles">Ready Tiles</string>
+    <!-- Text hovering over canvas, number of tiles not ready [CHAR LIMIT=15] -->
+ <string name="unready_tiles">Unready Tiles</string>
+ <!-- Text hovering over canvas, number of tiles that haven't been
+ allocated to a place on the page [CHAR LIMIT=15] -->
+ <string name="unplaced_tiles">Unplaced Tiles</string>
+</resources>
diff --git a/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackActivity.java b/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackActivity.java
new file mode 100644
index 0000000..5130f5d
--- /dev/null
+++ b/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackActivity.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.test.tilebenchmark;
+
+import android.app.Activity;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.view.GestureDetector.SimpleOnGestureListener;
+import android.view.MotionEvent;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.widget.Button;
+import android.widget.SeekBar;
+import android.widget.SeekBar.OnSeekBarChangeListener;
+import android.widget.TextView;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+
+/**
+ * Interface for playing back WebView tile rendering status. Draws viewport and
+ * states of tiles and statistics for off-line analysis.
+ */
+public class PlaybackActivity extends Activity {
+ private static final float SCROLL_SCALER = 0.125f;
+
+ PlaybackView mPlaybackView;
+ SeekBar mSeekBar;
+ Button mForward;
+ Button mBackward;
+ TextView mFrameDisplay;
+
+ private int mFrame = -1;
+ private int mFrameMax;
+
+ private class TouchFrameChangeListener extends SimpleOnGestureListener {
+ float mDist = 0;
+
+ @Override
+ public boolean onScroll(MotionEvent e1, MotionEvent e2,
+ float distanceX, float distanceY) {
+ // aggregate scrolls so that small ones can add up
+ mDist += distanceY * SCROLL_SCALER;
+ int intComponent = (int) Math.floor(Math.abs(mDist));
+ if (intComponent >= 1) {
+ int scrollDist = (mDist > 0) ? intComponent : -intComponent;
+ setFrame(null, mFrame + scrollDist);
+ mDist -= scrollDist;
+ }
+ return super.onScroll(e1, e2, distanceX, distanceY);
+ }
+ };
+
+ private class SeekFrameChangeListener implements OnSeekBarChangeListener {
+ @Override
+ public void onStopTrackingTouch(SeekBar seekBar) {
+ }
+
+ @Override
+ public void onStartTrackingTouch(SeekBar seekBar) {
+ }
+
+ @Override
+ public void onProgressChanged(SeekBar seekBar, int progress,
+ boolean fromUser) {
+ setFrame(seekBar, progress);
+ }
+ };
+
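+    // Loads the recorded TileData[][] log off the UI thread; if the file can't
+    // be read, falls back to the generated test pattern below.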
+ private class LoadFileTask extends AsyncTask<String, Void, TileData[][]> {
+ @Override
+ protected TileData[][] doInBackground(String... params) {
+ TileData[][] data = null;
+ try {
+ FileInputStream fis = openFileInput(params[0]);
+ ObjectInputStream in = new ObjectInputStream(fis);
+ data = (TileData[][]) in.readObject();
+ in.close();
+ } catch (IOException ex) {
+ ex.printStackTrace();
+ } catch (ClassNotFoundException ex) {
+ ex.printStackTrace();
+ }
+ return data;
+ }
+
+ @Override
+ protected void onPostExecute(TileData data[][]) {
+ if (data == null) {
+ data = genTestPattern();
+ }
+ mPlaybackView.setData(data);
+
+ mFrameMax = data.length - 1;
+ mSeekBar.setMax(mFrameMax);
+
+ setFrame(null, 0);
+ }
+ }
+
+ private void setFrame(View changer, int f) {
+ if (f < 0) {
+ f = 0;
+ } else if (f > mFrameMax) {
+ f = mFrameMax;
+ }
+
+ if (mFrame == f) {
+ return;
+ }
+
+ mFrame = f;
+ mForward.setEnabled(mFrame != mFrameMax);
+ mBackward.setEnabled(mFrame != 0);
+ if (changer != mSeekBar) {
+ mSeekBar.setProgress(mFrame);
+ }
+ mFrameDisplay.setText(Integer.toString(mFrame));
+ mPlaybackView.setFrame(mFrame);
+ };
+
+ /** Called when the activity is first created. */
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.playback);
+
+ mPlaybackView = (PlaybackView) findViewById(R.id.playback);
+ mSeekBar = (SeekBar) findViewById(R.id.seek_bar);
+ mForward = (Button) findViewById(R.id.forward);
+ mBackward = (Button) findViewById(R.id.backward);
+ mFrameDisplay = (TextView) findViewById(R.id.frame_display);
+
+ mForward.setOnClickListener(new OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ setFrame(v, mFrame + 1);
+ }
+ });
+
+ mBackward.setOnClickListener(new OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ setFrame(v, mFrame - 1);
+ }
+ });
+
+ mSeekBar.setOnSeekBarChangeListener(new SeekFrameChangeListener());
+
+ mPlaybackView.setOnGestureListener(new TouchFrameChangeListener());
+
+ new LoadFileTask().execute(ProfileActivity.TEMP_FILENAME);
+ }
+
+ private TileData[][] genTestPattern() {
+ final int XMAX = 5;
+ final int FRAMEMAX = 99;
+
+ TileData example[][] = new TileData[FRAMEMAX][];
+ for (int frame = 0; frame < FRAMEMAX; frame++) {
+ int numTiles = frame + 10;
+
+ example[frame] = new TileData[numTiles];
+ for (int t = 0; t < numTiles; t++) {
+ int x = t % XMAX;
+ int y = t / XMAX;
+ boolean isReady = y * 10 < frame;
+ example[frame][t] = new TileData(x, y, isReady, 0);
+ }
+ }
+ return example;
+ }
+}
diff --git a/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackGraphs.java b/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackGraphs.java
new file mode 100644
index 0000000..db4a341
--- /dev/null
+++ b/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackGraphs.java
@@ -0,0 +1,264 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.test.tilebenchmark;
+
+import android.content.res.Resources;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.Rect;
+import android.graphics.drawable.ShapeDrawable;
+import android.os.Bundle;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+
+public class PlaybackGraphs {
+ private static final int BAR_WIDTH = PlaybackView.TILEX * 3;
+ private static final float CANVAS_SCALE = 0.2f;
+ private static final double IDEAL_FRAMES = 60;
+ private static final int LABELOFFSET = 100;
+ private static Paint whiteLabels;
+
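+    // Returns 1 if the tile at (tileIndexX, tileIndexY) intersects the viewport
+    // rectangle (l, b, r, t), 0 otherwise; used as a weight when averaging
+    // per-tile readiness into a coverage value.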
+ private static double viewportCoverage(int l, int b, int r, int t,
+ int tileIndexX,
+ int tileIndexY) {
+ if (tileIndexX * PlaybackView.TILEX < r
+ && (tileIndexX + 1) * PlaybackView.TILEX >= l
+ && tileIndexY * PlaybackView.TILEY < t
+ && (tileIndexY + 1) * PlaybackView.TILEY >= b) {
+ return 1.0f;
+ }
+ return 0.0f;
+ }
+
+ private interface MetricGen {
+ public double getValue(TileData[] frame);
+
+ public double getMax();
+
+ public int getLabelId();
+ };
+
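+    // Per-frame metrics. In each frame's TileData[], indices 0 and 1 carry the
+    // viewport corners (with frame[0].level holding the render time in
+    // microseconds); actual tiles start at index 2.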
+ private static MetricGen[] Metrics = new MetricGen[] {
+ new MetricGen() {
+ // framerate graph
+ @Override
+ public double getValue(TileData[] frame) {
+ int renderTimeUS = frame[0].level;
+ return 1.0e6f / renderTimeUS;
+ }
+
+ @Override
+ public double getMax() {
+ return IDEAL_FRAMES;
+ }
+
+ @Override
+ public int getLabelId() {
+ return R.string.frames_per_second;
+ }
+ }, new MetricGen() {
+ // coverage graph
+ @Override
+ public double getValue(TileData[] frame) {
+ int l = frame[0].x, b = frame[0].y;
+ int r = frame[1].x, t = frame[1].y;
+ double total = 0, totalCount = 0;
+ for (int tileID = 2; tileID < frame.length; tileID++) {
+ TileData data = frame[tileID];
+ double coverage = viewportCoverage(l, b, r, t, data.x,
+ data.y);
+ total += coverage * (data.isReady ? 1 : 0);
+ totalCount += coverage;
+ }
+ if (totalCount == 0) {
+ return -1;
+ }
+ return total / totalCount;
+ }
+
+ @Override
+ public double getMax() {
+ return 1;
+ }
+
+ @Override
+ public int getLabelId() {
+ return R.string.viewport_coverage;
+ }
+ }
+ };
+
+ private interface StatGen {
+ public double getValue(double sortedValues[]);
+
+ public int getLabelId();
+ }
+
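+    // Linearly interpolated percentile. For example, with sortedValues = {1, 2, 4, 8}
+    // and ratioAbove = 0.25, index = 0.75, so the result is 1 * 0.25 + 2 * 0.75 = 1.75.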
+ public static double getPercentile(double sortedValues[], double ratioAbove) {
+ double index = ratioAbove * (sortedValues.length - 1);
+ int intIndex = (int) Math.floor(index);
+ if (index == intIndex) {
+ return sortedValues[intIndex];
+ }
+ double alpha = index - intIndex;
+ return sortedValues[intIndex] * (1 - alpha)
+ + sortedValues[intIndex + 1] * (alpha);
+ }
+
+ private static StatGen[] Stats = new StatGen[] {
+ new StatGen() {
+ @Override
+ public double getValue(double[] sortedValues) {
+ return getPercentile(sortedValues, 0.25);
+ }
+
+ @Override
+ public int getLabelId() {
+ return R.string.percentile_25;
+ }
+ }, new StatGen() {
+ @Override
+ public double getValue(double[] sortedValues) {
+ return getPercentile(sortedValues, 0.5);
+ }
+
+ @Override
+ public int getLabelId() {
+ return R.string.percentile_50;
+ }
+ }, new StatGen() {
+ @Override
+ public double getValue(double[] sortedValues) {
+ return getPercentile(sortedValues, 0.75);
+ }
+
+ @Override
+ public int getLabelId() {
+ return R.string.percentile_75;
+ }
+ },
+ };
+
+ public PlaybackGraphs() {
+ whiteLabels = new Paint();
+ whiteLabels.setColor(Color.WHITE);
+ whiteLabels.setTextSize(PlaybackView.TILEY / 3);
+ }
+
+ private ArrayList<ShapeDrawable> mShapes = new ArrayList<ShapeDrawable>();
+ private double[][] mStats = new double[Metrics.length][Stats.length];
+
+ public void setData(TileData[][] tileProfilingData) {
+ mShapes.clear();
+ double metricValues[] = new double[tileProfilingData.length];
+
+ if (tileProfilingData.length == 0) {
+ return;
+ }
+
+ for (int metricIndex = 0; metricIndex < Metrics.length; metricIndex++) {
+ // create graph out of rectangles, one per frame
+ int lastBar = 0;
+ for (int frameIndex = 0; frameIndex < tileProfilingData.length; frameIndex++) {
+ TileData frame[] = tileProfilingData[frameIndex];
+ int newBar = (frame[0].y + frame[1].y) / 2;
+
+ MetricGen s = Metrics[metricIndex];
+ double absoluteValue = s.getValue(frame);
+ double relativeValue = absoluteValue / s.getMax();
+ int rightPos = (int) (-BAR_WIDTH * metricIndex);
+ int leftPos = (int) (-BAR_WIDTH * (metricIndex + relativeValue));
+
+ ShapeDrawable graphBar = new ShapeDrawable();
+ graphBar.getPaint().setColor(Color.BLUE);
+ graphBar.setBounds(leftPos, lastBar, rightPos, newBar);
+
+ mShapes.add(graphBar);
+ metricValues[frameIndex] = absoluteValue;
+ lastBar = newBar;
+ }
+
+ // store aggregate statistics per metric (median, and similar)
+ Arrays.sort(metricValues);
+ for (int statIndex = 0; statIndex < Stats.length; statIndex++) {
+ mStats[metricIndex][statIndex] = Stats[statIndex]
+ .getValue(metricValues);
+ }
+ }
+ }
+
+ public void drawVerticalShiftedShapes(Canvas canvas,
+ ArrayList<ShapeDrawable> shapes) {
+ // Shapes drawn here are drawn relative to the viewRect
+ Rect viewRect = shapes.get(shapes.size() - 1).getBounds();
+ canvas.translate(0, 5 * PlaybackView.TILEY - viewRect.top);
+
+ for (ShapeDrawable shape : mShapes) {
+ shape.draw(canvas);
+ }
+ for (ShapeDrawable shape : shapes) {
+ shape.draw(canvas);
+ }
+ }
+
+ public void draw(Canvas canvas, ArrayList<ShapeDrawable> shapes,
+ String[] strings, Resources resources) {
+ canvas.scale(CANVAS_SCALE, CANVAS_SCALE);
+
+ canvas.translate(BAR_WIDTH * Metrics.length, 0);
+
+ canvas.save();
+ drawVerticalShiftedShapes(canvas, shapes);
+ canvas.restore();
+
+ for (int metricIndex = 0; metricIndex < Metrics.length; metricIndex++) {
+ String label = resources.getString(
+ Metrics[metricIndex].getLabelId());
+ int xPos = (metricIndex + 1) * -BAR_WIDTH;
+ int yPos = LABELOFFSET;
+ canvas.drawText(label, xPos, yPos, whiteLabels);
+ for (int statIndex = 0; statIndex < Stats.length; statIndex++) {
+ label = resources.getString(R.string.format_stat, mStats[metricIndex][statIndex]);
+ yPos = LABELOFFSET + (1 + statIndex) * PlaybackView.TILEY / 2;
+ canvas.drawText(label, xPos, yPos, whiteLabels);
+ }
+ }
+ for (int stringIndex = 0; stringIndex < strings.length; stringIndex++) {
+ int yPos = LABELOFFSET + stringIndex * PlaybackView.TILEY / 2;
+ canvas.drawText(strings[stringIndex], 0, yPos, whiteLabels);
+ }
+ }
+
+ public Bundle getStatBundle(Resources resources) {
+ Bundle b = new Bundle();
+
+ for (int metricIndex = 0; metricIndex < Metrics.length; metricIndex++) {
+ for (int statIndex = 0; statIndex < Stats.length; statIndex++) {
+ String metricLabel = resources.getString(
+ Metrics[metricIndex].getLabelId());
+ String statLabel = resources.getString(
+ Stats[statIndex].getLabelId());
+ double value = mStats[metricIndex][statIndex];
+ b.putDouble(metricLabel + " " + statLabel, value);
+ }
+ }
+
+ return b;
+ }
+}
diff --git a/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackView.java b/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackView.java
new file mode 100644
index 0000000..f104eac
--- /dev/null
+++ b/tests/TileBenchmark/src/com/test/tilebenchmark/PlaybackView.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.test.tilebenchmark;
+
+import android.content.Context;
+import android.graphics.Canvas;
+import android.graphics.Color;
+import android.graphics.Paint;
+import android.graphics.drawable.ShapeDrawable;
+import android.util.AttributeSet;
+import android.view.GestureDetector;
+import android.view.GestureDetector.OnGestureListener;
+import android.view.MotionEvent;
+import android.view.View;
+
+import java.util.ArrayList;
+
+public class PlaybackView extends View {
+ public static final int TILEX = 300;
+ public static final int TILEY = 300;
+
+ private Paint levelPaint = null, coordPaint = null, goldPaint = null;
+ private PlaybackGraphs mGraphs;
+
+ private ArrayList<ShapeDrawable> mTempShapes = new ArrayList<ShapeDrawable>();
+ private TileData mProfData[][] = null;
+ private GestureDetector mGestureDetector = null;
+ private String mRenderStrings[] = new String[3];
+
+ private class TileDrawable extends ShapeDrawable {
+ TileData tile;
+
+ public TileDrawable(TileData t) {
+ int tileColorId = t.isReady ? R.color.ready_tile
+ : R.color.unready_tile;
+ getPaint().setColor(getResources().getColor(tileColorId));
+
+ setBounds(t.x * TILEX, t.y * TILEY, (t.x + 1) * TILEX, (t.y + 1)
+ * TILEY);
+ this.tile = t;
+ }
+
+ @Override
+ public void draw(Canvas canvas) {
+ super.draw(canvas);
+ canvas.drawText(Integer.toString(tile.level), getBounds().left,
+ getBounds().bottom, levelPaint);
+ canvas.drawText(tile.x + "," + tile.y, getBounds().left,
+ ((getBounds().bottom + getBounds().top) / 2), coordPaint);
+ }
+ }
+
+ public PlaybackView(Context context) {
+ super(context);
+ init();
+ }
+
+ public PlaybackView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ init();
+ }
+
+ public PlaybackView(Context context, AttributeSet attrs, int defStyle) {
+ super(context, attrs, defStyle);
+ init();
+ }
+
+ public void setOnGestureListener(OnGestureListener gl) {
+ mGestureDetector = new GestureDetector(getContext(), gl);
+ }
+
+ @Override
+ public boolean onTouchEvent(MotionEvent event) {
+ mGestureDetector.onTouchEvent(event);
+ return true;
+ }
+
+ private void init() {
+ levelPaint = new Paint();
+ levelPaint.setColor(Color.WHITE);
+ levelPaint.setTextSize(TILEY / 2);
+ coordPaint = new Paint();
+ coordPaint.setColor(Color.BLACK);
+ coordPaint.setTextSize(TILEY / 3);
+ goldPaint = new Paint();
+ goldPaint.setColor(0xffa0e010);
+ mGraphs = new PlaybackGraphs();
+ }
+
+ @Override
+ protected void onDraw(Canvas canvas) {
+ super.onDraw(canvas);
+
+ if (mTempShapes == null || mTempShapes.isEmpty()) {
+ return;
+ }
+
+ mGraphs.draw(canvas, mTempShapes, mRenderStrings, getResources());
+ }
+
+ public int setFrame(int frame) {
+ if (mProfData == null || mProfData.length == 0) {
+ return 0;
+ }
+
+ int readyTiles = 0, unreadyTiles = 0, unplacedTiles = 0;
+ mTempShapes.clear();
+
+ // draw actual tiles
+ for (int tileID = 2; tileID < mProfData[frame].length; tileID++) {
+ TileData t = mProfData[frame][tileID];
+ mTempShapes.add(new TileDrawable(t));
+ if (t.isReady) {
+ readyTiles++;
+ } else {
+ unreadyTiles++;
+ }
+ if (t.x < 0 || t.y < 0) {
+ unplacedTiles++;
+ }
+ }
+ mRenderStrings[0] = getResources().getString(R.string.format_stat_name,
+ getResources().getString(R.string.ready_tiles), readyTiles);
+ mRenderStrings[1] = getResources().getString(R.string.format_stat_name,
+ getResources().getString(R.string.unready_tiles), unreadyTiles);
+ mRenderStrings[2] = getResources().getString(R.string.format_stat_name,
+ getResources().getString(R.string.unplaced_tiles), unplacedTiles);
+
+ // draw view rect (using first two TileData objects)
+ ShapeDrawable viewShape = new ShapeDrawable();
+ viewShape.getPaint().setColor(0xff0000ff);
+ viewShape.setAlpha(64);
+ viewShape.setBounds(mProfData[frame][0].x, mProfData[frame][0].y,
+ mProfData[frame][1].x, mProfData[frame][1].y);
+ mTempShapes.add(viewShape);
+ this.invalidate();
+ return frame;
+ }
+
+ public void setData(TileData[][] tileProfilingData) {
+ mProfData = tileProfilingData;
+
+ mGraphs.setData(mProfData);
+ }
+}
diff --git a/tests/TileBenchmark/src/com/test/tilebenchmark/ProfileActivity.java b/tests/TileBenchmark/src/com/test/tilebenchmark/ProfileActivity.java
new file mode 100644
index 0000000..23b6275
--- /dev/null
+++ b/tests/TileBenchmark/src/com/test/tilebenchmark/ProfileActivity.java
@@ -0,0 +1,203 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.test.tilebenchmark;
+
+import android.app.Activity;
+import android.content.Intent;
+import android.content.Context;
+import android.graphics.Bitmap;
+import android.os.AsyncTask;
+import android.os.Bundle;
+import android.os.CountDownTimer;
+import android.util.Pair;
+import android.view.KeyEvent;
+import android.view.View;
+import android.view.View.OnClickListener;
+import android.webkit.WebSettings;
+import android.webkit.WebView;
+import android.webkit.WebViewClient;
+import android.widget.AdapterView;
+import android.widget.AdapterView.OnItemSelectedListener;
+import android.widget.ArrayAdapter;
+import android.widget.Button;
+import android.widget.EditText;
+import android.widget.Spinner;
+import android.widget.TextView;
+import android.widget.TextView.OnEditorActionListener;
+
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.ObjectOutputStream;
+
+/**
+ * Interface for profiling the webview's scrolling, with simple controls on how
+ * to scroll, and what content to load.
+ */
+public class ProfileActivity extends Activity {
+
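+    // Receives the per-frame tile data once auto-scrolling finishes; the default
+    // callback serializes it to TEMP_FILENAME for PlaybackActivity to inspect.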
+ public interface ProfileCallback {
+ public void profileCallback(TileData data[][]);
+ }
+
+ public static final String TEMP_FILENAME = "profile.tiles";
+    private static final int LOAD_TEST_DELAY = 2000; // ms to wait after page load
+                                                      // before starting the test
+
+ Button mInspectButton;
+ Spinner mVelocitySpinner;
+ EditText mUrl;
+ ProfiledWebView mWeb;
+ ProfileCallback mCallback;
+
+ private class VelocitySelectedListener implements OnItemSelectedListener {
+ @Override
+ public void onItemSelected(AdapterView<?> parent, View view,
+ int position, long id) {
+ String speedStr = parent.getItemAtPosition(position).toString();
+ int speedInt = Integer.parseInt(speedStr);
+ mWeb.setAutoScrollSpeed(speedInt);
+ }
+
+ @Override
+ public void onNothingSelected(AdapterView<?> parent) {
+ }
+ }
+
+ private class LoggingWebViewClient extends WebViewClient {
+ @Override
+ public boolean shouldOverrideUrlLoading(WebView view, String url) {
+ return false;
+ }
+
+ @Override
+ public void onPageStarted(WebView view, String url, Bitmap favicon) {
+ super.onPageStarted(view, url, favicon);
+ mUrl.setText(url);
+ }
+
+ @Override
+ public void onPageFinished(WebView view, String url) {
+ super.onPageFinished(view, url);
+ view.requestFocus();
+ new CountDownTimer(LOAD_TEST_DELAY, LOAD_TEST_DELAY) {
+ @Override
+ public void onTick(long millisUntilFinished) {
+ }
+
+ @Override
+ public void onFinish() {
+ mWeb.startScrollTest(mCallback);
+ }
+ }.start();
+ }
+ }
+
+ private class StoreFileTask extends
+ AsyncTask<Pair<String, TileData[][]>, Void, Void> {
+
+ @Override
+ protected Void doInBackground(Pair<String, TileData[][]>... params) {
+ try {
+ FileOutputStream fos = openFileOutput(params[0].first,
+ Context.MODE_PRIVATE);
+ ObjectOutputStream out = new ObjectOutputStream(fos);
+ out.writeObject(params[0].second);
+ out.close();
+ } catch (IOException ex) {
+ ex.printStackTrace();
+ }
+ return null;
+ }
+
+ @Override
+ protected void onPostExecute(Void v) {
+ mUrl.setBackgroundResource(R.color.finished_url);
+ }
+ }
+
+ /** Called when the activity is first created. */
+ @Override
+ public void onCreate(Bundle savedInstanceState) {
+ super.onCreate(savedInstanceState);
+ setContentView(R.layout.main);
+ mInspectButton = (Button) findViewById(R.id.inspect);
+ mVelocitySpinner = (Spinner) findViewById(R.id.velocity);
+ mUrl = (EditText) findViewById(R.id.url);
+ mWeb = (ProfiledWebView) findViewById(R.id.web);
+ mCallback = new ProfileCallback() {
+ @SuppressWarnings("unchecked")
+ @Override
+ public void profileCallback(TileData[][] data) {
+ new StoreFileTask().execute(new Pair<String, TileData[][]>(TEMP_FILENAME, data));
+ }
+ };
+
+ // Inspect button (opens PlaybackActivity)
+ mInspectButton.setOnClickListener(new OnClickListener() {
+ @Override
+ public void onClick(View v) {
+ startActivity(new Intent(ProfileActivity.this,
+ PlaybackActivity.class));
+ }
+ });
+
+ // Velocity spinner
+ ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(
+ this, R.array.velocity_array,
+ android.R.layout.simple_spinner_item);
+ adapter.setDropDownViewResource(
+ android.R.layout.simple_spinner_dropdown_item);
+ mVelocitySpinner.setAdapter(adapter);
+ mVelocitySpinner.setOnItemSelectedListener(
+ new VelocitySelectedListener());
+ mVelocitySpinner.setSelection(3);
+
+ // Custom profiling WebView
+ WebSettings settings = mWeb.getSettings();
+ settings.setJavaScriptEnabled(true);
+ settings.setSupportZoom(true);
+ settings.setEnableSmoothTransition(true);
+ settings.setBuiltInZoomControls(true);
+ settings.setLoadWithOverviewMode(true);
+ mWeb.setWebViewClient(new LoggingWebViewClient());
+
+ // URL text entry
+ mUrl.setOnEditorActionListener(new OnEditorActionListener() {
+ public boolean onEditorAction(TextView v, int actionId,
+ KeyEvent event) {
+ String url = mUrl.getText().toString();
+ mUrl.setBackgroundResource(R.color.unfinished_url);
+ mWeb.loadUrl(url);
+ mWeb.requestFocus();
+ return true;
+ }
+ });
+ }
+
+ public void setCallback(ProfileCallback callback) {
+ mCallback = callback;
+ }
+
+ @Override
+ public boolean onKeyDown(int keyCode, KeyEvent event) {
+ if ((keyCode == KeyEvent.KEYCODE_BACK) && mWeb.canGoBack()) {
+ mWeb.goBack();
+ return true;
+ }
+ return super.onKeyDown(keyCode, event);
+ }
+}
diff --git a/tests/TileBenchmark/src/com/test/tilebenchmark/ProfiledWebView.java b/tests/TileBenchmark/src/com/test/tilebenchmark/ProfiledWebView.java
new file mode 100644
index 0000000..6560624
--- /dev/null
+++ b/tests/TileBenchmark/src/com/test/tilebenchmark/ProfiledWebView.java
@@ -0,0 +1,106 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.test.tilebenchmark;
+
+import android.content.Context;
+import android.util.AttributeSet;
+import android.webkit.WebView;
+
+import com.test.tilebenchmark.ProfileActivity.ProfileCallback;
+
+public class ProfiledWebView extends WebView {
+ private int mSpeed;
+
+ private boolean isScrolling = false;
+ private ProfileCallback mCallback;
+
+ public ProfiledWebView(Context context) {
+ super(context);
+ }
+
+ public ProfiledWebView(Context context, AttributeSet attrs) {
+ super(context, attrs);
+ }
+
+ public ProfiledWebView(Context context, AttributeSet attrs, int defStyle) {
+ super(context, attrs, defStyle);
+ }
+
+ public ProfiledWebView(Context context, AttributeSet attrs, int defStyle,
+ boolean privateBrowsing) {
+ super(context, attrs, defStyle, privateBrowsing);
+ }
+
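+    // While a scroll test is running, advance by mSpeed pixels on every draw pass;
+    // once the page can no longer scroll down, stop and collect the tile log.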
+ @Override
+ protected void onDraw(android.graphics.Canvas canvas) {
+ if (isScrolling) {
+ if (canScrollVertically(1)) {
+ scrollBy(0, mSpeed);
+ } else {
+ stopScrollTest();
+ isScrolling = false;
+ }
+ }
+ super.onDraw(canvas);
+ }
+
+ /*
+ * Called once the page is loaded to start scrolling for evaluating tiles
+ */
+ public void startScrollTest(ProfileCallback callback) {
+ isScrolling = true;
+ mCallback = callback;
+ super.tileProfilingStart();
+ invalidate();
+ }
+
+ /*
+ * Called once the page has stopped scrolling
+ */
+ public void stopScrollTest() {
+ float testRatio = super.tileProfilingStop();
+
+ TileData data[][] = new TileData[super.tileProfilingNumFrames()][];
+ for (int frame = 0; frame < data.length; frame++) {
+ data[frame] = new TileData[
+ super.tileProfilingNumTilesInFrame(frame)];
+ for (int tile = 0; tile < data[frame].length; tile++) {
+ int x = super.tileProfilingGetX(frame, tile);
+ int y = super.tileProfilingGetY(frame, tile);
+ boolean isReady = super.tileProfilingGetReady(frame, tile);
+ int level = super.tileProfilingGetLevel(frame, tile);
+
+ data[frame][tile] = new TileData(x, y, isReady, level);
+ }
+ }
+ super.tileProfilingClear();
+
+ mCallback.profileCallback(data);
+ }
+
+ @Override
+ public void loadUrl(String url) {
+ if (!url.startsWith("http://")) {
+ url = "http://" + url;
+ }
+ super.loadUrl(url);
+ }
+
+ public void setAutoScrollSpeed(int speedInt) {
+ mSpeed = speedInt;
+ }
+}
diff --git a/tests/TileBenchmark/src/com/test/tilebenchmark/TileData.java b/tests/TileBenchmark/src/com/test/tilebenchmark/TileData.java
new file mode 100644
index 0000000..7d4bb9f
--- /dev/null
+++ b/tests/TileBenchmark/src/com/test/tilebenchmark/TileData.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright (C) 2011 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.test.tilebenchmark;
+
+import java.io.Serializable;
+
+public class TileData implements Serializable {
+ public int x, y;
+ public boolean isReady;
+ public int level;
+
+ public TileData(int x, int y, boolean isReady, int level) {
+ this.x = x;
+ this.y = y;
+ this.isReady = isReady;
+ this.level = level;
+ }
+}
diff --git a/tools/aapt/AaptAssets.cpp b/tools/aapt/AaptAssets.cpp
index 29d2b87..b35878a 100644
--- a/tools/aapt/AaptAssets.cpp
+++ b/tools/aapt/AaptAssets.cpp
@@ -1560,10 +1560,10 @@
}
ssize_t AaptDir::slurpFullTree(Bundle* bundle, const String8& srcDir,
- const AaptGroupEntry& kind, const String8& resType)
+ const AaptGroupEntry& kind, const String8& resType,
+ sp<FilePathStore>& fullResPaths)
{
Vector<String8> fileNames;
-
{
DIR* dir = NULL;
@@ -1586,9 +1586,14 @@
if (isHidden(srcDir.string(), entry->d_name))
continue;
- fileNames.add(String8(entry->d_name));
+ String8 name(entry->d_name);
+ fileNames.add(name);
+ // Add fully qualified path for dependency purposes
+ // if we're collecting them
+ if (fullResPaths != NULL) {
+ fullResPaths->add(srcDir.appendPathCopy(name));
+ }
}
-
closedir(dir);
}
@@ -1615,7 +1620,7 @@
notAdded = true;
}
ssize_t res = subdir->slurpFullTree(bundle, pathName, kind,
- resType);
+ resType, fullResPaths);
if (res < NO_ERROR) {
return res;
}
@@ -1847,7 +1852,7 @@
sp<AaptDir> assetAaptDir = makeDir(String8(kAssetDir));
AaptGroupEntry group;
count = assetAaptDir->slurpFullTree(bundle, assetRoot, group,
- String8());
+ String8(), mFullResPaths);
if (count < 0) {
totalCount = count;
goto bail;
@@ -1878,6 +1883,7 @@
sp<AaptAssets> nextOverlay = new AaptAssets();
current->setOverlay(nextOverlay);
current = nextOverlay;
+ current->setFullResPaths(mFullResPaths);
}
count = current->slurpResourceTree(bundle, String8(res));
@@ -1920,7 +1926,7 @@
* guarantees about ordering, so we're okay with an inorder search
* using whatever order the OS happens to hand back to us.
*/
- count = slurpFullTree(bundle, assetRoot, AaptGroupEntry(), String8());
+ count = slurpFullTree(bundle, assetRoot, AaptGroupEntry(), String8(), mFullResPaths);
if (count < 0) {
/* failure; report error and remove archive */
totalCount = count;
@@ -1946,9 +1952,10 @@
ssize_t AaptAssets::slurpFullTree(Bundle* bundle, const String8& srcDir,
const AaptGroupEntry& kind,
- const String8& resType)
+ const String8& resType,
+ sp<FilePathStore>& fullResPaths)
{
- ssize_t res = AaptDir::slurpFullTree(bundle, srcDir, kind, resType);
+ ssize_t res = AaptDir::slurpFullTree(bundle, srcDir, kind, resType, fullResPaths);
if (res > 0) {
mGroupEntries.add(kind);
}
@@ -2010,7 +2017,7 @@
if (type == kFileTypeDirectory) {
sp<AaptDir> dir = makeDir(String8(entry->d_name));
ssize_t res = dir->slurpFullTree(bundle, subdirName, group,
- resType);
+ resType, mFullResPaths);
if (res < 0) {
count = res;
goto bail;
diff --git a/tools/aapt/AaptAssets.h b/tools/aapt/AaptAssets.h
index 65743d8..a1c7c40 100644
--- a/tools/aapt/AaptAssets.h
+++ b/tools/aapt/AaptAssets.h
@@ -140,6 +140,7 @@
}
class AaptGroup;
+class FilePathStore;
/**
* A single asset file we know about.
@@ -269,7 +270,8 @@
virtual ssize_t slurpFullTree(Bundle* bundle,
const String8& srcDir,
const AaptGroupEntry& kind,
- const String8& resType);
+ const String8& resType,
+ sp<FilePathStore>& fullResPaths);
/*
* Perform some sanity checks on the names of files and directories here.
@@ -484,6 +486,14 @@
ResourceTypeSet();
};
+// Storage for lists of fully qualified paths for
+// resources encountered during slurping.
+class FilePathStore : public RefBase,
+ public Vector<String8>
+{
+public:
+ FilePathStore();
+};
/**
* Asset hierarchy being operated on.
@@ -517,7 +527,8 @@
virtual ssize_t slurpFullTree(Bundle* bundle,
const String8& srcDir,
const AaptGroupEntry& kind,
- const String8& resType);
+ const String8& resType,
+ sp<FilePathStore>& fullResPaths);
ssize_t slurpResourceTree(Bundle* bundle, const String8& srcDir);
ssize_t slurpResourceZip(Bundle* bundle, const char* filename);
@@ -545,6 +556,10 @@
inline void
setResources(KeyedVector<String8, sp<ResourceTypeSet> >* res) { delete mRes; mRes = res; }
+ inline sp<FilePathStore>& getFullResPaths() { return mFullResPaths; }
+ inline void
+ setFullResPaths(sp<FilePathStore>& res) { mFullResPaths = res; }
+
private:
String8 mPackage;
SortedVector<AaptGroupEntry> mGroupEntries;
@@ -558,6 +573,8 @@
sp<AaptAssets> mOverlay;
KeyedVector<String8, sp<ResourceTypeSet> >* mRes;
+
+ sp<FilePathStore> mFullResPaths;
};
#endif // __AAPT_ASSETS_H
diff --git a/tools/aapt/Bundle.h b/tools/aapt/Bundle.h
index fa84e93..56fe524 100644
--- a/tools/aapt/Bundle.h
+++ b/tools/aapt/Bundle.h
@@ -41,11 +41,12 @@
mCompressionMethod(0), mOutputAPKFile(NULL),
mManifestPackageNameOverride(NULL), mInstrumentationPackageNameOverride(NULL),
mIsOverlayPackage(false),
- mAutoAddOverlay(false), mAssetSourceDir(NULL), mProguardFile(NULL),
+ mAutoAddOverlay(false), mGenDependencies(false),
+ mAssetSourceDir(NULL), mProguardFile(NULL),
mAndroidManifestFile(NULL), mPublicOutputFile(NULL),
mRClassDir(NULL), mResourceIntermediatesDir(NULL), mManifestMinSdkVersion(NULL),
mMinSdkVersion(NULL), mTargetSdkVersion(NULL), mMaxSdkVersion(NULL),
- mVersionCode(NULL), mVersionName(NULL), mCustomPackage(NULL),
+ mVersionCode(NULL), mVersionName(NULL), mCustomPackage(NULL), mExtraPackages(NULL),
mMaxResVersion(NULL), mDebugMode(false), mNonConstantId(false), mProduct(NULL),
mArgc(0), mArgv(NULL)
{}
@@ -97,6 +98,8 @@
void setIsOverlayPackage(bool val) { mIsOverlayPackage = val; }
bool getAutoAddOverlay() { return mAutoAddOverlay; }
void setAutoAddOverlay(bool val) { mAutoAddOverlay = val; }
+ bool getGenDependencies() { return mGenDependencies; }
+ void setGenDependencies(bool val) { mGenDependencies = val; }
/*
* Input options.
@@ -138,6 +141,8 @@
void setVersionName(const char* val) { mVersionName = val; }
const char* getCustomPackage() const { return mCustomPackage; }
void setCustomPackage(const char* val) { mCustomPackage = val; }
+ const char* getExtraPackages() const { return mExtraPackages; }
+ void setExtraPackages(const char* val) { mExtraPackages = val; }
const char* getMaxResVersion() const { return mMaxResVersion; }
void setMaxResVersion(const char * val) { mMaxResVersion = val; }
bool getDebugMode() { return mDebugMode; }
@@ -224,6 +229,7 @@
const char* mInstrumentationPackageNameOverride;
bool mIsOverlayPackage;
bool mAutoAddOverlay;
+ bool mGenDependencies;
const char* mAssetSourceDir;
const char* mProguardFile;
const char* mAndroidManifestFile;
@@ -243,6 +249,7 @@
const char* mVersionCode;
const char* mVersionName;
const char* mCustomPackage;
+ const char* mExtraPackages;
const char* mMaxResVersion;
bool mDebugMode;
bool mNonConstantId;
diff --git a/tools/aapt/Command.cpp b/tools/aapt/Command.cpp
index 7852197..903c62c 100644
--- a/tools/aapt/Command.cpp
+++ b/tools/aapt/Command.cpp
@@ -1508,6 +1508,8 @@
status_t err;
sp<AaptAssets> assets;
int N;
+ FILE* fp;
+ String8 dependencyFile;
// -c zz_ZZ means do pseudolocalization
ResourceFilter filter;
@@ -1542,6 +1544,13 @@
// Load the assets.
assets = new AaptAssets();
+
+    // Set up full resource path gathering in assets if we're generating dependencies
+ if (bundle->getGenDependencies()) {
+ sp<FilePathStore> pathStore = new FilePathStore;
+ assets->setFullResPaths(pathStore);
+ }
+
err = assets->slurpFromArgs(bundle);
if (err < 0) {
goto bail;
@@ -1551,7 +1560,7 @@
assets->print();
}
- // If they asked for any files that need to be compiled, do so.
+    // If they asked for any files that need to be compiled, do so.
if (bundle->getResourceSourceDirs().size() || bundle->getAndroidManifestFile()) {
err = buildResources(bundle, assets);
if (err != 0) {
@@ -1565,10 +1574,29 @@
goto bail;
}
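+    // R.d collects the generated R.java files as Makefile targets and every
+    // slurped resource (plus the manifest) as prerequisites; truncate it here,
+    // then writeResourceSymbols and writeDependencyPreReqs append the pieces.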
+ if (bundle->getGenDependencies()) {
+ dependencyFile = String8(bundle->getRClassDir());
+ // Make sure we have a clean dependency file to start with
+ dependencyFile.appendPath("R.d");
+ fp = fopen(dependencyFile, "w");
+ fclose(fp);
+ }
+
// Write out R.java constants
if (assets->getPackage() == assets->getSymbolsPrivatePackage()) {
if (bundle->getCustomPackage() == NULL) {
err = writeResourceSymbols(bundle, assets, assets->getPackage(), true);
+ // Copy R.java for libraries
+ if (bundle->getExtraPackages() != NULL) {
+ // Split on colon
+ String8 libs(bundle->getExtraPackages());
+ char* packageString = strtok(libs.lockBuffer(libs.length()), ":");
+ while (packageString != NULL) {
+ err = writeResourceSymbols(bundle, assets, String8(packageString), true);
+ packageString = strtok(NULL, ":");
+ }
+ libs.unlockBuffer();
+ }
} else {
const String8 customPkg(bundle->getCustomPackage());
err = writeResourceSymbols(bundle, assets, customPkg, true);
@@ -1587,6 +1615,19 @@
}
}
+ if (bundle->getGenDependencies()) {
+ // Now that writeResourceSymbols has taken care of writing the
+ // dependency targets to the dependencyFile, we'll write the
+ // pre-requisites.
+ fp = fopen(dependencyFile, "a+");
+ fprintf(fp, " : ");
+ err = writeDependencyPreReqs(bundle, assets, fp);
+
+        // Also manually add the AndroidManifest since it's a non-asset
+ fprintf(fp, "%s \\\n", bundle->getAndroidManifestFile());
+ fclose(fp);
+ }
+
// Write out the ProGuard file
err = writeProguardFile(bundle, assets);
if (err < 0) {
diff --git a/tools/aapt/Main.cpp b/tools/aapt/Main.cpp
index 1e63131..8edb5b5 100644
--- a/tools/aapt/Main.cpp
+++ b/tools/aapt/Main.cpp
@@ -145,6 +145,10 @@
" inserts android:versionName in to manifest.\n"
" --custom-package\n"
" generates R.java into a different package.\n"
+ " --extra-packages\n"
+ " generate R.java for libraries. Separate libraries with ':'.\n"
+ " --generate-dependencies\n"
+ " generate a dependency file for R.java.\n"
" --auto-add-overlay\n"
" Automatically add resources that are only in overlays.\n"
" --rename-manifest-package\n"
@@ -475,6 +479,17 @@
goto bail;
}
bundle.setCustomPackage(argv[0]);
+ } else if (strcmp(cp, "-extra-packages") == 0) {
+ argc--;
+ argv++;
+ if (!argc) {
+ fprintf(stderr, "ERROR: No argument supplied for '--extra-packages' option\n");
+ wantUsage = true;
+ goto bail;
+ }
+ bundle.setExtraPackages(argv[0]);
+ } else if (strcmp(cp, "-generate-dependencies") == 0) {
+ bundle.setGenDependencies(true);
} else if (strcmp(cp, "-utf16") == 0) {
bundle.setWantUTF16(true);
} else if (strcmp(cp, "-rename-manifest-package") == 0) {
diff --git a/tools/aapt/Main.h b/tools/aapt/Main.h
index 3ba4f39..1df1144 100644
--- a/tools/aapt/Main.h
+++ b/tools/aapt/Main.h
@@ -46,4 +46,5 @@
String8 getAttribute(const ResXMLTree& tree, const char* ns,
const char* attr, String8* outError);
+status_t writeDependencyPreReqs(Bundle* bundle, const sp<AaptAssets>& assets, FILE* fp);
#endif // __MAIN_H
diff --git a/tools/aapt/Resource.cpp b/tools/aapt/Resource.cpp
index b4ac929..99e781d 100644
--- a/tools/aapt/Resource.cpp
+++ b/tools/aapt/Resource.cpp
@@ -51,6 +51,12 @@
{
}
+FilePathStore::FilePathStore()
+ :RefBase(),
+ Vector<String8>()
+{
+}
+
class ResourceDirIterator
{
public:
@@ -1917,6 +1923,16 @@
return err;
}
fclose(fp);
+
+ if (bundle->getGenDependencies()) {
+ // Add this R.java to the dependency file
+ String8 dependencyFile(bundle->getRClassDir());
+ dependencyFile.appendPath("R.d");
+
+ fp = fopen(dependencyFile.string(), "a");
+ fprintf(fp,"%s \\\n", dest.string());
+ fclose(fp);
+ }
}
return NO_ERROR;
@@ -2244,3 +2260,16 @@
return err;
}
+
+status_t
+writeDependencyPreReqs(Bundle* bundle, const sp<AaptAssets>& assets, FILE* fp)
+{
+ status_t deps = -1;
+ sp<FilePathStore> files = assets->getFullResPaths();
+ for (size_t file_i = 0; file_i < files->size(); ++file_i) {
+ // Add the full file path to the dependency file
+ fprintf(fp, "%s \\\n", files->itemAt(file_i).string());
+ deps++;
+ }
+ return deps;
+}
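
A minimal sketch, with hypothetical paths, of the R.d dependency file the change
above produces: writeResourceSymbols appends the generated R.java paths as targets,
Command.cpp writes " : ", and writeDependencyPreReqs plus the manifest line supply
the prerequisites.

    gen/com/example/app/R.java \
    gen/com/lib/one/R.java \
     : res/layout/main.xml \
    res/values/strings.xml \
    AndroidManifest.xml \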