    FLAG_SET_DEFAULT(UseFastStosb, false);
  }

  // For AMD Processors use XMM/YMM MOVDQU instructions
  // for Object Initialization as default
  if (is_amd() && cpu_family() >= 0x19) {
    if (FLAG_IS_DEFAULT(UseFastStosb)) {
      UseFastStosb = false;
    }
  }

#ifdef COMPILER2
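  // For Intel processors with wide vectors (> 16 bytes), likewise default to
  // XMM/YMM based initialization rather than fast-string (REP STOSB) stores.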
  if (is_intel() && MaxVectorSize > 16) {
    if (FLAG_IS_DEFAULT(UseFastStosb)) {
      UseFastStosb = false;
    }
  }
#endif

  // Use XMM/YMM MOVDQU instruction for Object Initialization
  if (!UseFastStosb && UseSSE >= 2 && UseUnalignedLoadStores) {
    if (FLAG_IS_DEFAULT(UseXMMForObjInit)) {
      UseXMMForObjInit = true;
    }
  } else if (UseXMMForObjInit) {
    warning("UseXMMForObjInit requires SSE2 and unaligned load/stores. Feature is switched off.");
    FLAG_SET_DEFAULT(UseXMMForObjInit, false);
  }

#ifdef COMPILER2
  if (FLAG_IS_DEFAULT(AlignVector)) {
    // Modern processors allow misaligned memory operations for vectors.
    AlignVector = !UseUnalignedLoadStores;
  }
#endif // COMPILER2

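  // Pick a default prefetch instruction that the CPU actually supports.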
  if (FLAG_IS_DEFAULT(AllocatePrefetchInstr)) {
    if (AllocatePrefetchInstr == 3 && !supports_3dnow_prefetch()) {
      FLAG_SET_DEFAULT(AllocatePrefetchInstr, 0);
    } else if (!supports_sse() && supports_3dnow_prefetch()) {
      FLAG_SET_DEFAULT(AllocatePrefetchInstr, 3);